diff --git a/src/murfey/client/contexts/spa.py b/src/murfey/client/contexts/spa.py
index debae379c..70c43dd6c 100644
--- a/src/murfey/client/contexts/spa.py
+++ b/src/murfey/client/contexts/spa.py
@@ -570,7 +570,7 @@ def _position_analysis(
     grid_square = _grid_square_from_file(transferred_file)
     grid_square_metadata_file = _grid_square_metadata_file(
         transferred_file,
-        machine_config["data_directories"],
+        [Path(p) for p in machine_config["data_directories"]],
         environment.visit,
         grid_square,
     )
@@ -921,6 +921,7 @@ def _register_processing_job(
     )
     msg: Dict[str, Any] = {
         "tag": tag,
+        "source": tag,
         "recipe": "ispyb-relion",
         "parameters": {
             "acquisition_software": parameters["acquisition_software"],
diff --git a/src/murfey/client/contexts/spa_metadata.py b/src/murfey/client/contexts/spa_metadata.py
index 8966a51cc..7cd98b4e6 100644
--- a/src/murfey/client/contexts/spa_metadata.py
+++ b/src/murfey/client/contexts/spa_metadata.py
@@ -4,7 +4,6 @@
 import requests
 import xmltodict
-from PIL import Image

 from murfey.client.context import Context
 from murfey.client.contexts.spa import _get_grid_square_atlas_positions, _get_source
@@ -85,17 +84,9 @@ def post_transfer(
             atlas_original_pixel_size = atlas_xml_data["MicroscopeImage"][
                 "SpatialScale"
             ]["pixelSize"]["x"]["numericValue"]
-            readout_width = float(
-                atlas_xml_data["MicroscopeImage"]["SpatialScale"]["pixelSize"]["x"][
-                    "numericValue"
-                ]
-            )
             # need to calculate the pixel size of the downscaled image
-            atlas_im = Image.open(atlas_xml_path.with_suffix(".jpg"))
-            atlas_pixel_size = atlas_original_pixel_size * (
-                readout_width / atlas_im.width
-            )
+            atlas_pixel_size = float(atlas_original_pixel_size) * 7.8
             source = _get_source(
                 visitless_path.parent / "Images-Disc1" / visitless_path.name,
diff --git a/src/murfey/client/contexts/tomo.py b/src/murfey/client/contexts/tomo.py
index a57a8300d..e9bcd2aaf 100644
--- a/src/murfey/client/contexts/tomo.py
+++ b/src/murfey/client/contexts/tomo.py
@@ -375,6 +375,7 @@ def _add_tilt(
                     proc_url,
                     {
                         "tag": tilt_series,
+                        "source": str(self._basepath),
                         "recipe": "em-tomo-preprocess",
                         "experiment_type": "tomography",
                     },
@@ -385,6 +386,7 @@ def _add_tilt(
                     proc_url,
                     {
                         "tag": tilt_series,
+                        "source": str(self._basepath),
                         "recipe": "em-tomo-align",
                         "experiment_type": "tomography",
                     },
@@ -396,6 +398,7 @@ def _add_tilt(
                     proc_url,
                     json={
                         "tag": tilt_series,
+                        "source": str(self._basepath),
                         "recipe": "em-tomo-preprocess",
                         "experiment_type": "tomography",
                     },
@@ -404,6 +407,7 @@ def _add_tilt(
                     proc_url,
                     json={
                         "tag": tilt_series,
+                        "source": str(self._basepath),
                         "recipe": "em-tomo-align",
                         "experiment_type": "tomography",
                     },
diff --git a/src/murfey/client/multigrid_control.py b/src/murfey/client/multigrid_control.py
index 1790d1e43..9833f2353 100644
--- a/src/murfey/client/multigrid_control.py
+++ b/src/murfey/client/multigrid_control.py
@@ -366,16 +366,9 @@ def _start_dc(self, json, from_form: bool = False):
                 f"{self._environment.url.geturl()}/clients/{self._environment.client_id}/tomography_processing_parameters",
                 json=json,
             )
+            source = Path(json["source"])
-            self._environment.listeners["data_collection_group_ids"] = {
-                context._flush_data_collections
-            }
-            self._environment.listeners["data_collection_ids"] = {
-                context._flush_processing_job
-            }
-            self._environment.listeners["autoproc_program_ids"] = {
-                context._flush_preprocess
-            }
+            self._environment.id_tag_registry["data_collection_group"].append(
+                str(source)
+            )
@@ -386,12 +379,85 @@ def _start_dc(self, json, from_form: bool = False):
                 "tag": str(source),
             }
             requests.post(url, json=dcg_data)
+
+            data = {
+                "voltage": json["voltage"],
+                "pixel_size_on_image": json["pixel_size_on_image"],
+                "experiment_type": json["experiment_type"],
+                "image_size_x": json["image_size_x"],
+                "image_size_y": json["image_size_y"],
+                "file_extension": json["file_extension"],
+                "acquisition_software": json["acquisition_software"],
+                "image_directory": str(self._environment.default_destinations[source]),
+                "tag": json["tilt_series_tag"],
+                "source": str(source),
+                "magnification": json["magnification"],
+                "total_exposed_dose": json.get("total_exposed_dose"),
+                "c2aperture": json.get("c2aperture"),
+                "exposure_time": json.get("exposure_time"),
+                "slit_width": json.get("slit_width"),
+                "phase_plate": json.get("phase_plate", False),
+            }
+            capture_post(
+                f"{str(self._environment.url.geturl())}/visits/{str(self._environment.visit)}/{self._environment.murfey_session}/start_data_collection",
+                json=data,
+            )
+            for recipe in ("em-tomo-preprocess", "em-tomo-align"):
+                capture_post(
+                    f"{str(self._environment.url.geturl())}/visits/{str(self._environment.visit)}/{self._environment.murfey_session}/register_processing_job",
+                    json={
+                        "tag": json["tilt_series_tag"],
+                        "source": str(source),
+                        "recipe": recipe,
+                    },
+                )
+            log.info("Registering tomography processing parameters")
+            if self._environment.data_collection_parameters.get("num_eer_frames"):
+                eer_response = requests.post(
+                    f"{str(self._environment.url.geturl())}/visits/{self._environment.visit}/{self._environment.murfey_session}/eer_fractionation_file",
+                    json={
+                        "num_frames": self._environment.data_collection_parameters[
+                            "num_eer_frames"
+                        ],
+                        "fractionation": self._environment.data_collection_parameters[
+                            "eer_fractionation"
+                        ],
+                        "dose_per_frame": self._environment.data_collection_parameters[
+                            "dose_per_frame"
+                        ],
+                        "fractionation_file_name": "eer_fractionation_tomo.txt",
+                    },
+                )
+                eer_fractionation_file = eer_response.json()["eer_fractionation_file"]
+                json.update({"eer_fractionation_file": eer_fractionation_file})
+            requests.post(
+                f"{self._environment.url.geturl()}/sessions/{self._environment.murfey_session}/tomography_preprocessing_parameters",
+                json=json,
+            )
+            context._flush_data_collections()
+            context._flush_processing_jobs()
+            capture_post(
+                f"{self._environment.url.geturl()}/visits/{self._environment.visit}/{self._environment.murfey_session}/flush_tomography_processing",
+                json={"rsync_source": str(source)},
+            )
+            log.info("tomography processing flushed")
+
         elif isinstance(context, SPAContext) or isinstance(context, SPAModularContext):
             url = f"{str(self._environment.url.geturl())}/visits/{str(self._environment.visit)}/{self.session_id}/register_data_collection_group"
             dcg_data = {
                 "experiment_type": "single particle",
                 "experiment_type_id": 37,
                 "tag": str(source),
+                "atlas": (
+                    str(self._environment.samples[source].atlas)
+                    if self._environment.samples.get(source)
+                    else ""
+                ),
+                "sample": (
+                    self._environment.samples[source].sample
+                    if self._environment.samples.get(source)
+                    else None
+                ),
             }
             capture_post(url, json=dcg_data)
             if from_form:
@@ -428,7 +494,11 @@ def _start_dc(self, json, from_form: bool = False):
             ):
                 capture_post(
                     f"{str(self._environment.url.geturl())}/visits/{str(self._environment.visit)}/{self.session_id}/register_processing_job",
-                    json={"tag": str(source), "recipe": recipe},
+                    json={
+                        "tag": str(source),
+                        "source": str(source),
+                        "recipe": recipe,
+                    },
                 )
         log.info(f"Posting SPA processing parameters: {json}")
         response = capture_post(
diff --git a/src/murfey/client/tui/app.py b/src/murfey/client/tui/app.py
index 3eb943e4a..9e3b3fdd9 100644
--- a/src/murfey/client/tui/app.py
+++ b/src/murfey/client/tui/app.py
@@ -516,7 +516,11 @@ def _start_dc(self, json, from_form: bool = False):
             for recipe in ("em-tomo-preprocess", "em-tomo-align"):
                 capture_post(
                     f"{str(self._url.geturl())}/visits/{str(self._visit)}/{self._environment.murfey_session}/register_processing_job",
-                    json={"tag": json["tilt_series_tag"], "recipe": recipe},
+                    json={
+                        "tag": json["tilt_series_tag"],
+                        "source": str(source),
+                        "recipe": recipe,
+                    },
                 )
             log.info("Registering tomography processing parameters")
             if self.app._environment.data_collection_parameters.get("num_eer_frames"):
@@ -600,7 +604,11 @@ def _start_dc(self, json, from_form: bool = False):
             ):
                 capture_post(
                     f"{str(self._url.geturl())}/visits/{str(self._visit)}/{self._environment.murfey_session}/register_processing_job",
-                    json={"tag": str(source), "recipe": recipe},
+                    json={
+                        "tag": str(source),
+                        "source": str(source),
+                        "recipe": recipe,
+                    },
                 )
         log.info(f"Posting SPA processing parameters: {json}")
         response = capture_post(
diff --git a/src/murfey/client/tui/screens.py b/src/murfey/client/tui/screens.py
index d1911ea05..5a79650a5 100644
--- a/src/murfey/client/tui/screens.py
+++ b/src/murfey/client/tui/screens.py
@@ -717,7 +717,11 @@ def on_button_pressed(self, event: Button.Pressed):
         if self._switch_status:
             self.app.install_screen(
                 DirectorySelection(
-                    [p for p in machine_data.get("data_directories", []) if p.exists()]
+                    [
+                        p
+                        for p in machine_data.get("data_directories", [])
+                        if Path(p).exists()
+                    ]
                 ),
                 "directory-select",
             )
diff --git a/src/murfey/server/__init__.py b/src/murfey/server/__init__.py
index ffff663fc..fb7d8d32d 100644
--- a/src/murfey/server/__init__.py
+++ b/src/murfey/server/__init__.py
@@ -2470,19 +2470,16 @@ def _save_bfactor(message: dict, _db=murfey_db, demo: bool = False):
         _transport_object.send(
             "ispyb_connector",
             {
-                "parameters": {
-                    "ispyb_command": "buffer",
-                    "buffer_lookup": {
-                        "particle_classification_id": refined_class_uuid,
-                    },
-                    "buffer_command": {
-                        "ispyb_command": "insert_particle_classification"
-                    },
-                    "program_id": message["program_id"],
-                    "bfactor_fit_intercept": str(bfactor_fitting[1]),
-                    "bfactor_fit_linear": str(bfactor_fitting[0]),
+                "ispyb_command": "buffer",
+                "buffer_lookup": {
+                    "particle_classification_id": refined_class_uuid,
                 },
-                "content": {"dummy": "dummy"},
+                "buffer_command": {
+                    "ispyb_command": "insert_particle_classification"
+                },
+                "program_id": message["program_id"],
+                "bfactor_fit_intercept": str(bfactor_fitting[1]),
+                "bfactor_fit_linear": str(bfactor_fitting[0]),
             },
             new_connection=True,
         )
@@ -2638,7 +2635,9 @@ def feedback_callback(header: dict, message: dict) -> None:
                     cassetteSlot=message.get("sample"),
                 )
                 if _transport_object:
-                    atlas_id = _transport_object.do_insert_atlas(atlas_record)
+                    atlas_id = _transport_object.do_insert_atlas(atlas_record)[
+                        "return_value"
+                    ]
                 murfey_dcg = db.DataCollectionGroup(
                     id=dcgid,
                     atlas_id=atlas_id,
@@ -2755,7 +2754,6 @@ def feedback_callback(header: dict, message: dict) -> None:
     elif message["register"] == "processing_job":
         murfey_session_id = message["session_id"]
         logger.info("registering processing job")
-        assert isinstance(global_state["data_collection_ids"], dict)
        dc = murfey_db.exec(
            select(db.DataCollection, db.DataCollectionGroup)
            .where(db.DataCollection.dcg_id == db.DataCollectionGroup.id)
diff --git a/src/murfey/server/api/__init__.py b/src/murfey/server/api/__init__.py
index 565ca7b05..d0e062416 100644
--- a/src/murfey/server/api/__init__.py
+++ b/src/murfey/server/api/__init__.py
@@ -11,6 +11,7 @@
 import sqlalchemy
 from fastapi import APIRouter, Depends, Request
 from fastapi.responses import FileResponse, HTMLResponse
+from ispyb.sqlalchemy import Atlas
 from ispyb.sqlalchemy import AutoProcProgram as ISPyBAutoProcProgram
 from ispyb.sqlalchemy import (
     BLSample,
@@ -611,7 +612,7 @@ def register_foil_hole(
     except Exception:
         if _transport_object:
             fh_ispyb_response = _transport_object.do_insert_foil_hole(
-                gsid.id, gs.thumbnail_size_x / gs.readout_area_x, foil_hole_params
+                gs.id, gs.thumbnail_size_x / gs.readout_area_x, foil_hole_params
             )
         else:
             fh_ispyb_response = {"success": False, "return_value": None}
@@ -1339,7 +1340,7 @@ def suggest_path(
     check_path = machine_config.rsync_basepath / base_path

     # Check previous year to account for the year rolling over during data collection
-    if not check_path.exists():
+    if not check_path.parent.exists():
         base_path_parts = base_path.split("/")
         for part in base_path_parts:
             # Find the path part corresponding to the year
@@ -1351,10 +1352,10 @@ def suggest_path(
             check_path = machine_config.rsync_basepath / base_path

     # If it's not in the previous year either, it's a genuine error
-    if not check_path.exists():
+    if not check_path.parent.exists():
         log_message = (
             "Unable to find current visit folder under "
-            f"{str(check_path_prev)!r} or {str(check_path)!r}"
+            f"{str(check_path_prev.parent)!r} or {str(check_path.parent)!r}"
         )
         log.error(log_message)
         raise FileNotFoundError(log_message)
@@ -1370,9 +1371,13 @@ def suggest_path(
     return {"suggested_path": check_path.relative_to(machine_config.rsync_basepath)}


-@router.post("/{session_id}/make_rsyncer_destination")
-def make_rsyncer_destination(session_id: int, destination: Path, db=murfey_db):
-    secure_path_parts = [secure_filename(p) for p in destination.parts]
+class Dest(BaseModel):
+    destination: Path
+
+
+@router.post("/sessions/{session_id}/make_rsyncer_destination")
+def make_rsyncer_destination(session_id: int, destination: Dest, db=murfey_db):
+    secure_path_parts = [secure_filename(p) for p in destination.destination.parts]
     destination_path = "/".join(secure_path_parts)
     instrument_name = (
         db.exec(select(Session).where(Session.id == session_id)).one().instrument_name
@@ -1427,19 +1432,31 @@ def register_dc_group(
         dcg_murfey[0].atlas = dcg_params.atlas
         dcg_murfey[0].sample = dcg_params.sample
         dcg_murfey[0].atlas_pixel_size = dcg_params.atlas_pixel_size
+
+        if _transport_object:
+            if dcg_murfey[0].atlas_id is not None:
+                _transport_object.send(
+                    _transport_object.feedback_queue,
+                    {
+                        "register": "atlas_update",
+                        "atlas_id": dcg_murfey[0].atlas_id,
+                        "atlas": dcg_params.atlas,
+                        "sample": dcg_params.sample,
+                        "atlas_pixel_size": dcg_params.atlas_pixel_size,
+                    },
+                )
+            else:
+                atlas_id_response = _transport_object.do_insert_atlas(
+                    Atlas(
+                        dataCollectionGroupId=dcg_murfey[0].id,
+                        atlasImage=dcg_params.atlas,
+                        pixelSize=dcg_params.atlas_pixel_size,
+                        cassetteSlot=dcg_params.sample,
+                    )
+                )
+                dcg_murfey[0].atlas_id = atlas_id_response["return_value"]
         db.add(dcg_murfey[0])
         db.commit()
-        if _transport_object:
-            _transport_object.send(
-                _transport_object.feedback_queue,
-                {
-                    "register": "atlas_update",
-                    "atlas_id": dcg_murfey.atlas_id,
-                    "atlas": dcg_params.atlas,
-                    "sample": dcg_params.sample,
-                    "atlas_pixel_size": dcg_params.atlas_pixel_size,
-                },
-            )
     else:
         dcg_parameters = {
             "start_time": str(datetime.datetime.now()),
@@ -1528,6 +1545,7 @@ def register_proc(
         "session_id": session_id,
         "experiment_type": proc_params.experiment_type,
         "recipe": proc_params.recipe,
+        "source": proc_params.source,
         "tag": proc_params.tag,
         "job_parameters": {
             k: v for k, v in proc_params.parameters.items() if v not in (None, "None")
diff --git a/src/murfey/server/ispyb.py b/src/murfey/server/ispyb.py
index ce01b2cdc..e1646f07f 100644
--- a/src/murfey/server/ispyb.py
+++ b/src/murfey/server/ispyb.py
@@ -60,7 +60,7 @@ def __init__(self, transport_type):
         self.transport = workflows.transport.lookup(transport_type)()
         self.transport.connect()
         self.feedback_queue = ""
-        self.ispyb = ispyb.open() if os.getenv("ISYPB_CREDENTIALS") else None
+        self.ispyb = ispyb.open() if os.getenv("ISPYB_CREDENTIALS") else None
         self._connection_callback: Callable | None = None

     def reconnect(self):
@@ -137,23 +137,36 @@ def do_insert_grid_square(
     ):
         # most of this is for mypy
         if (
-            grid_square_parameters.height is not None
-            and grid_square_parameters.width is not None
-            and grid_square_parameters.pixel_size is not None
+            grid_square_parameters.pixel_size is not None
             and grid_square_parameters.thumbnail_size_x is not None
-            and grid_square_parameters.thumbnail_size_y is not None
             and grid_square_parameters.readout_area_x is not None
-            and grid_square_parameters.readout_area_y is not None
-            and grid_square_parameters.angle is not None
         ):
             # currently hard coding the scale factor because of difficulties with
             # guaranteeing we have the atlas jpg and mrc sizes
-            grid_square_parameters.height = int(grid_square_parameters.height / 7.8)
-            grid_square_parameters.width = int(grid_square_parameters.width / 7.8)
             grid_square_parameters.pixel_size *= (
                 grid_square_parameters.readout_area_x
                 / grid_square_parameters.thumbnail_size_x
             )
+            grid_square_parameters.height = (
+                int(grid_square_parameters.height / 7.8)
+                if grid_square_parameters.height
+                else None
+            )
+            grid_square_parameters.width = (
+                int(grid_square_parameters.width / 7.8)
+                if grid_square_parameters.width
+                else None
+            )
+            grid_square_parameters.x_location = (
+                int(grid_square_parameters.x_location / 7.8)
+                if grid_square_parameters.x_location
+                else None
+            )
+            grid_square_parameters.y_location = (
+                int(grid_square_parameters.y_location / 7.8)
+                if grid_square_parameters.y_location
+                else None
+            )
         record = GridSquare(
             atlasId=atlas_id,
             gridSquareLabel=grid_square_id,
@@ -161,7 +174,7 @@ def do_insert_grid_square(
             pixelLocationX=grid_square_parameters.x_location,
             pixelLocationY=grid_square_parameters.y_location,
             height=grid_square_parameters.height,
-            weight=grid_square_parameters.width,
+            width=grid_square_parameters.width,
             angle=grid_square_parameters.angle,
             stageLocationX=grid_square_parameters.x_stage_position,
             stageLocationY=grid_square_parameters.y_stage_position,
@@ -201,7 +214,15 @@ def do_update_grid_square(
                 / grid_square_parameters.thumbnail_size_x
             )
         grid_square.gridSquareImage = grid_square_parameters.image
-        grid_square.pixelLocationX = grid_square_parameters.x_location
+        grid_square.pixelLocationX = (
+            int(grid_square_parameters.x_location / 7.8)
+            if grid_square_parameters.x_location
+            else None
+        )
+        grid_square.pixelLocationY = (
+            int(grid_square_parameters.y_location / 7.8)
+            if grid_square_parameters.y_location
+            else None
+        )
-        grid_square.pixelLocationY = grid_square_parameters.y_location
         grid_square.height = (
             int(grid_square_parameters.height / 7.8)
             if grid_square_parameters.height
@@ -235,18 +257,29 @@ def do_insert_foil_hole(
         foil_hole_parameters: FoilHoleParameters,
     ):
         if (
-            foil_hole_parameters.diameter is not None
-            and foil_hole_parameters.thumbnail_size_x is not None
+            foil_hole_parameters.thumbnail_size_x is not None
             and foil_hole_parameters.readout_area_x is not None
             and foil_hole_parameters.pixel_size is not None
         ):
-            foil_hole_parameters.diameter = int(
-                foil_hole_parameters.diameter * scale_factor
-            )
             foil_hole_parameters.pixel_size *= (
                 foil_hole_parameters.readout_area_x
                 / foil_hole_parameters.thumbnail_size_x
             )
+            foil_hole_parameters.diameter = (
+                int(foil_hole_parameters.diameter * scale_factor)
+                if foil_hole_parameters.diameter
+                else None
+            )
+            foil_hole_parameters.x_location = (
+                int(foil_hole_parameters.x_location * scale_factor)
+                if foil_hole_parameters.x_location
+                else None
+            )
+            foil_hole_parameters.y_location = (
+                int(foil_hole_parameters.y_location * scale_factor)
+                if foil_hole_parameters.y_location
+                else None
+            )
         record = FoilHole(
             gridSquareId=grid_square_id,
             foilHoleLabel=foil_hole_parameters.name,
@@ -284,8 +317,16 @@ def do_update_foil_hole(
             db.query(FoilHole).filter(FoilHole.foilHoleId == foil_hole_id).one()
         )
         foil_hole.foilHoleImage = foil_hole_parameters.image
-        foil_hole.pixelLocationX = foil_hole_parameters.x_location
-        foil_hole.pixelLocationY = foil_hole_parameters.y_location
+        foil_hole.pixelLocationX = (
+            int(foil_hole_parameters.x_location * scale_factor)
+            if foil_hole_parameters.x_location
+            else None
+        )
+        foil_hole.pixelLocationY = (
+            int(foil_hole_parameters.y_location * scale_factor)
+            if foil_hole_parameters.y_location
+            else None
+        )
         foil_hole.diameter = (
             foil_hole_parameters.diameter * scale_factor
             if foil_hole_parameters.diameter is not None
diff --git a/src/murfey/util/db.py b/src/murfey/util/db.py
index 602ecf5a7..6a8ea302a 100644
--- a/src/murfey/util/db.py
+++ b/src/murfey/util/db.py
@@ -361,6 +361,7 @@ class DataCollectionGroup(SQLModel, table=True):  # type: ignore
     session_id: int = Field(foreign_key="session.id", primary_key=True)
     tag: str = Field(primary_key=True)
     atlas_id: Optional[int] = None
+    atlas_pixel_size: Optional[float] = None
     atlas: str = ""
     sample: Optional[int] = None
     session: Optional[Session] = Relationship(back_populates="data_collection_groups")
diff --git a/src/murfey/util/models.py b/src/murfey/util/models.py
index 5b2da4d3f..e8dd8cac3 100644
--- a/src/murfey/util/models.py
+++ b/src/murfey/util/models.py
@@ -72,6 +72,7 @@ class DCParameters(BaseModel):

 class ProcessingJobParameters(BaseModel):
     tag: str
+    source: str
     recipe: str
     parameters: Dict[str, Any] = {}
     experiment_type: str = "spa"
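
Note on the hard-coded 7.8 factor: do_insert_grid_square and do_update_grid_square above now repeat the same guarded conversion, int(x / 7.8) if x else None, for height, width, x_location and y_location, and the foil-hole paths repeat the mirror-image pattern with a caller-supplied scale_factor. A shared helper would collapse the grid-square branches; this is a sketch only, the 7.8 value comes from the diff but the helper name and placement are hypothetical:

    from typing import Optional

    # Ratio between the atlas mrc readout size and the downscaled jpg,
    # hard coded in the diff "because of difficulties with guaranteeing
    # we have the atlas jpg and mrc sizes".
    ATLAS_SCALE_FACTOR = 7.8

    def scale_to_thumbnail(value: Optional[float]) -> Optional[int]:
        # Mirrors the diff's guard: None and 0 both pass through as None,
        # anything else is scaled down and truncated to an int.
        return int(value / ATLAS_SCALE_FACTOR) if value else None

Each repeated block then reduces to a single call, e.g. grid_square_parameters.height = scale_to_thumbnail(grid_square_parameters.height).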
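Separately, because make_rsyncer_destination now takes its destination via the Dest body model instead of a bare parameter, callers must send the destination as JSON rather than as a query string. A sketch of the updated client call, assuming a reachable murfey server; the host, port and session id below are illustrative only:

    import requests

    # The destination path is now wrapped in a JSON body matching Dest.
    response = requests.post(
        "http://localhost:8000/sessions/1/make_rsyncer_destination",
        json={"destination": "data/2024/cm12345-1"},
    )
    response.raise_for_status()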