3 changes: 2 additions & 1 deletion src/murfey/client/contexts/spa.py
@@ -570,7 +570,7 @@ def _position_analysis(
grid_square = _grid_square_from_file(transferred_file)
grid_square_metadata_file = _grid_square_metadata_file(
transferred_file,
-                 machine_config["data_directories"],
+                 [Path(p) for p in machine_config["data_directories"]],
environment.visit,
grid_square,
)
@@ -921,6 +921,7 @@ def _register_processing_job(
)
msg: Dict[str, Any] = {
"tag": tag,
"source": tag,
"recipe": "ispyb-relion",
"parameters": {
"acquisition_software": parameters["acquisition_software"],
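Note: `machine_config["data_directories"]` arrives as plain strings from the machine configuration, so the list comprehension restores the `Path` API that `_grid_square_metadata_file` expects. A minimal sketch of the pattern, with a hypothetical config:

```python
from pathlib import Path

# Hypothetical machine configuration as deserialised from YAML/JSON: plain strings.
machine_config = {"data_directories": ["/dls/m02/data", "/dls/m02/backup"]}

# Wrapping each entry in Path restores path operations (parts, glob, exists, ...)
# that string entries do not provide.
data_dirs = [Path(p) for p in machine_config["data_directories"]]
```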
11 changes: 1 addition & 10 deletions src/murfey/client/contexts/spa_metadata.py
@@ -4,7 +4,6 @@

import requests
import xmltodict
- from PIL import Image

from murfey.client.context import Context
from murfey.client.contexts.spa import _get_grid_square_atlas_positions, _get_source
@@ -85,17 +84,9 @@ def post_transfer(
atlas_original_pixel_size = atlas_xml_data["MicroscopeImage"][
"SpatialScale"
]["pixelSize"]["x"]["numericValue"]
- readout_width = float(
-     atlas_xml_data["MicroscopeImage"]["SpatialScale"]["pixelSize"]["x"][
-         "numericValue"
-     ]
- )

# need to calculate the pixel size of the downscaled image
- atlas_im = Image.open(atlas_xml_path.with_suffix(".jpg"))
- atlas_pixel_size = atlas_original_pixel_size * (
-     readout_width / atlas_im.width
- )
+ atlas_pixel_size = atlas_original_pixel_size * 7.8

source = _get_source(
visitless_path.parent / "Images-Disc1" / visitless_path.name,
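Note: the downscaled atlas pixel size was previously computed from the atlas JPEG width via PIL (using a `readout_width` that, oddly, was read from the same XML pixel-size field as the pixel size itself); the diff replaces this with a fixed 7.8 scale factor, presumably because the atlas JPEGs are downscaled by a constant ratio on these instruments. A sketch of the resulting calculation, with the XML layout taken from the surrounding code:

```python
from pathlib import Path
import xmltodict

def downscaled_atlas_pixel_size(atlas_xml_path: Path, factor: float = 7.8) -> float:
    # Pixel size recorded by the microscope for the full-size atlas image.
    atlas_xml_data = xmltodict.parse(atlas_xml_path.read_text())
    original = float(
        atlas_xml_data["MicroscopeImage"]["SpatialScale"]["pixelSize"]["x"][
            "numericValue"
        ]
    )
    # Assumed constant downscale factor of the atlas JPEG; replaces the old
    # PIL-based width ratio and drops the PIL dependency.
    return original * factor
```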
4 changes: 4 additions & 0 deletions src/murfey/client/contexts/tomo.py
@@ -375,6 +375,7 @@ def _add_tilt(
proc_url,
{
"tag": tilt_series,
"source": str(self._basepath),
"recipe": "em-tomo-preprocess",
"experiment_type": "tomography",
},
@@ -385,6 +386,7 @@ def _add_tilt(
proc_url,
{
"tag": tilt_series,
"source": str(self._basepath),
"recipe": "em-tomo-align",
"experiment_type": "tomography",
},
@@ -396,6 +398,7 @@
proc_url,
json={
"tag": tilt_series,
"source": str(self._basepath),
"recipe": "em-tomo-preprocess",
"experiment_type": "tomography",
},
@@ -404,6 +407,7 @@
proc_url,
json={
"tag": tilt_series,
"source": str(self._basepath),
"recipe": "em-tomo-align",
"experiment_type": "tomography",
},
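Note: each tilt registration payload now carries an explicit `source` (the rsyncer base path) alongside the tilt-series `tag`, so the server no longer has to infer the source directory from the tag. A sketch of the payload shape, with hypothetical values:

```python
from pathlib import Path

basepath = Path("/dls/m02/data/2024/cm12345-1/raw")  # hypothetical rsync source

payload = {
    "tag": "Position_1",             # tilt series identifier
    "source": str(basepath),         # rsync source, now sent explicitly
    "recipe": "em-tomo-preprocess",  # or "em-tomo-align"
    "experiment_type": "tomography",
}
```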
90 changes: 80 additions & 10 deletions src/murfey/client/multigrid_control.py
@@ -366,16 +366,9 @@ def _start_dc(self, json, from_form: bool = False):
f"{self._environment.url.geturl()}/clients/{self._environment.client_id}/tomography_processing_parameters",
json=json,
)

source = Path(json["source"])
- self._environment.listeners["data_collection_group_ids"] = {
-     context._flush_data_collections
- }
- self._environment.listeners["data_collection_ids"] = {
-     context._flush_processing_job
- }
- self._environment.listeners["autoproc_program_ids"] = {
-     context._flush_preprocess
- }

self._environment.id_tag_registry["data_collection_group"].append(
str(source)
)
@@ -386,12 +379,85 @@ def _start_dc(self, json, from_form: bool = False):
"tag": str(source),
}
requests.post(url, json=dcg_data)

+ data = {
+     "voltage": json["voltage"],
+     "pixel_size_on_image": json["pixel_size_on_image"],
+     "experiment_type": json["experiment_type"],
+     "image_size_x": json["image_size_x"],
+     "image_size_y": json["image_size_y"],
+     "file_extension": json["file_extension"],
+     "acquisition_software": json["acquisition_software"],
+     "image_directory": str(self._environment.default_destinations[source]),
+     "tag": json["tilt_series_tag"],
+     "source": str(source),
+     "magnification": json["magnification"],
+     "total_exposed_dose": json.get("total_exposed_dose"),
+     "c2aperture": json.get("c2aperture"),
+     "exposure_time": json.get("exposure_time"),
+     "slit_width": json.get("slit_width"),
+     "phase_plate": json.get("phase_plate", False),
+ }
+ capture_post(
+     f"{str(self._environment.url.geturl())}/visits/{str(self._environment.visit)}/{self._environment.murfey_session}/start_data_collection",
+     json=data,
+ )
+ for recipe in ("em-tomo-preprocess", "em-tomo-align"):
+     capture_post(
+         f"{str(self._environment.url.geturl())}/visits/{str(self._environment.visit)}/{self._environment.murfey_session}/register_processing_job",
+         json={
+             "tag": json["tilt_series_tag"],
+             "source": str(source),
+             "recipe": recipe,
+         },
+     )
+ log.info("Registering tomography processing parameters")
+ if self._environment.data_collection_parameters.get("num_eer_frames"):
+     eer_response = requests.post(
+         f"{str(self._environment.url.geturl())}/visits/{self._environment.visit}/{self._environment.murfey_session}/eer_fractionation_file",
+         json={
+             "num_frames": self._environment.data_collection_parameters[
+                 "num_eer_frames"
+             ],
+             "fractionation": self._environment.data_collection_parameters[
+                 "eer_fractionation"
+             ],
+             "dose_per_frame": self._environment.data_collection_parameters[
+                 "dose_per_frame"
+             ],
+             "fractionation_file_name": "eer_fractionation_tomo.txt",
+         },
+     )
+     eer_fractionation_file = eer_response.json()["eer_fractionation_file"]
+     json.update({"eer_fractionation_file": eer_fractionation_file})
+ requests.post(
+     f"{self._environment.url.geturl()}/sessions/{self._environment.murfey_session}/tomography_preprocessing_parameters",
+     json=json,
+ )
+ context._flush_data_collections()
+ context._flush_processing_jobs()
+ capture_post(
+     f"{self._environment.url.geturl()}/visits/{self._environment.visit}/{self._environment.murfey_session}/flush_tomography_processing",
+     json={"rsync_source": str(source)},
+ )
+ log.info("tomography processing flushed")

elif isinstance(context, SPAContext) or isinstance(context, SPAModularContext):
url = f"{str(self._environment.url.geturl())}/visits/{str(self._environment.visit)}/{self.session_id}/register_data_collection_group"
dcg_data = {
"experiment_type": "single particle",
"experiment_type_id": 37,
"tag": str(source),
"atlas": (
str(self._environment.samples[source].atlas)
if self._environment.samples.get(source)
else ""
),
"sample": (
self._environment.samples[source].sample
if self._environment.samples.get(source)
else None
),
}
capture_post(url, json=dcg_data)
if from_form:
@@ -428,7 +494,11 @@ def _start_dc(self, json, from_form: bool = False):
):
capture_post(
f"{str(self._environment.url.geturl())}/visits/{str(self._environment.visit)}/{self.session_id}/register_processing_job",
json={"tag": str(source), "recipe": recipe},
json={
"tag": str(source),
"source": str(source),
"recipe": recipe,
},
)
log.info(f"Posting SPA processing parameters: {json}")
response = capture_post(
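Note: for EER data, the inlined flow above first asks the server to generate a fractionation file, threads the returned path into the processing parameters, and only then flushes queued tomography processing. A condensed sketch of that sequence; the endpoints are taken from the diff, while the server address, visit, and values are placeholders:

```python
import requests

base = "http://murfey.example:8000"  # hypothetical Murfey server
visit, session = "cm12345-1", 42     # hypothetical visit and session

# 1. Ask the server to write an EER fractionation file and return its path.
eer_response = requests.post(
    f"{base}/visits/{visit}/{session}/eer_fractionation_file",
    json={
        "num_frames": 1000,
        "fractionation": 20,
        "dose_per_frame": 0.5,
        "fractionation_file_name": "eer_fractionation_tomo.txt",
    },
)
params = {"eer_fractionation_file": eer_response.json()["eer_fractionation_file"]}

# 2. Register parameters, then flush any tomography processing queued meanwhile.
requests.post(
    f"{base}/sessions/{session}/tomography_preprocessing_parameters", json=params
)
requests.post(
    f"{base}/visits/{visit}/{session}/flush_tomography_processing",
    json={"rsync_source": "/dls/m02/data/2024/cm12345-1/raw"},
)
```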
12 changes: 10 additions & 2 deletions src/murfey/client/tui/app.py
@@ -516,7 +516,11 @@ def _start_dc(self, json, from_form: bool = False):
for recipe in ("em-tomo-preprocess", "em-tomo-align"):
capture_post(
f"{str(self._url.geturl())}/visits/{str(self._visit)}/{self._environment.murfey_session}/register_processing_job",
json={"tag": json["tilt_series_tag"], "recipe": recipe},
json={
"tag": json["tilt_series_tag"],
"source": str(source),
"recipe": recipe,
},
)
log.info("Registering tomography processing parameters")
if self.app._environment.data_collection_parameters.get("num_eer_frames"):
@@ -600,7 +604,11 @@ def _start_dc(self, json, from_form: bool = False):
):
capture_post(
f"{str(self._url.geturl())}/visits/{str(self._visit)}/{self._environment.murfey_session}/register_processing_job",
json={"tag": str(source), "recipe": recipe},
json={
"tag": str(source),
"source": str(source),
"recipe": recipe,
},
)
log.info(f"Posting SPA processing parameters: {json}")
response = capture_post(
6 changes: 5 additions & 1 deletion src/murfey/client/tui/screens.py
@@ -717,7 +717,11 @@ def on_button_pressed(self, event: Button.Pressed):
if self._switch_status:
self.app.install_screen(
DirectorySelection(
- [p for p in machine_data.get("data_directories", []) if p.exists()]
+ [
+     p
+     for p in machine_data.get("data_directories", [])
+     if Path(p).exists()
+ ]
),
"directory-select",
)
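Note: this is the same string-versus-`Path` issue as in spa.py: `machine_data["data_directories"]` holds strings, and `str` has no `.exists()`, so the old filter raised `AttributeError`. A minimal reproduction of the fix:

```python
from pathlib import Path

machine_data = {"data_directories": ["/tmp", "/definitely/not/here"]}

# str objects have no .exists(); convert before filtering.
existing = [p for p in machine_data.get("data_directories", []) if Path(p).exists()]
print(existing)  # ["/tmp"] on most Unix systems
```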
26 changes: 12 additions & 14 deletions src/murfey/server/__init__.py
@@ -2470,19 +2470,16 @@ def _save_bfactor(message: dict, _db=murfey_db, demo: bool = False):
_transport_object.send(
"ispyb_connector",
{
"parameters": {
"ispyb_command": "buffer",
"buffer_lookup": {
"particle_classification_id": refined_class_uuid,
},
"buffer_command": {
"ispyb_command": "insert_particle_classification"
},
"program_id": message["program_id"],
"bfactor_fit_intercept": str(bfactor_fitting[1]),
"bfactor_fit_linear": str(bfactor_fitting[0]),
"ispyb_command": "buffer",
"buffer_lookup": {
"particle_classification_id": refined_class_uuid,
},
"buffer_command": {
"ispyb_command": "insert_particle_classification"
},
"content": {"dummy": "dummy"},
"program_id": message["program_id"],
"bfactor_fit_intercept": str(bfactor_fitting[1]),
"bfactor_fit_linear": str(bfactor_fitting[0]),
},
new_connection=True,
)
@@ -2638,7 +2635,9 @@ def feedback_callback(header: dict, message: dict) -> None:
cassetteSlot=message.get("sample"),
)
if _transport_object:
- atlas_id = _transport_object.do_insert_atlas(atlas_record)
+ atlas_id = _transport_object.do_insert_atlas(atlas_record)[
+     "return_value"
+ ]
murfey_dcg = db.DataCollectionGroup(
id=dcgid,
atlas_id=atlas_id,
@@ -2755,7 +2754,6 @@ def feedback_callback(header: dict, message: dict) -> None:
elif message["register"] == "processing_job":
murfey_session_id = message["session_id"]
logger.info("registering processing job")
- assert isinstance(global_state["data_collection_ids"], dict)
dc = murfey_db.exec(
select(db.DataCollection, db.DataCollectionGroup)
.where(db.DataCollection.dcg_id == db.DataCollectionGroup.id)
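Note: `do_insert_atlas` evidently returns a status dictionary rather than a bare ID, so the caller now unpacks `"return_value"`. A sketch of the assumed convention, inferred from this diff and the `{"success": False, "return_value": None}` fallback in api/__init__.py:

```python
from typing import Any, Dict

def do_insert_atlas(atlas_record: Any) -> Dict[str, Any]:
    """Hypothetical stand-in for the transport-object method."""
    try:
        new_id = 101  # placeholder for the real ISPyB insert
        return {"success": True, "return_value": new_id}
    except Exception:
        return {"success": False, "return_value": None}

atlas_id = do_insert_atlas(object())["return_value"]
```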
54 changes: 36 additions & 18 deletions src/murfey/server/api/__init__.py
@@ -11,6 +11,7 @@
import sqlalchemy
from fastapi import APIRouter, Depends, Request
from fastapi.responses import FileResponse, HTMLResponse
+ from ispyb.sqlalchemy import Atlas
from ispyb.sqlalchemy import AutoProcProgram as ISPyBAutoProcProgram
from ispyb.sqlalchemy import (
BLSample,
@@ -611,7 +612,7 @@ def register_foil_hole(
except Exception:
if _transport_object:
fh_ispyb_response = _transport_object.do_insert_foil_hole(
- gsid.id, gs.thumbnail_size_x / gs.readout_area_x, foil_hole_params
+ gs.id, gs.thumbnail_size_x / gs.readout_area_x, foil_hole_params
)
else:
fh_ispyb_response = {"success": False, "return_value": None}
@@ -1339,7 +1340,7 @@ def suggest_path(
check_path = machine_config.rsync_basepath / base_path

# Check previous year to account for the year rolling over during data collection
- if not check_path.exists():
+ if not check_path.parent.exists():
base_path_parts = base_path.split("/")
for part in base_path_parts:
# Find the path part corresponding to the year
@@ -1351,10 +1352,10 @@
check_path = machine_config.rsync_basepath / base_path

# If it's not in the previous year either, it's a genuine error
- if not check_path.exists():
+ if not check_path.parent.exists():
log_message = (
"Unable to find current visit folder under "
f"{str(check_path_prev)!r} or {str(check_path)!r}"
f"{str(check_path_prev.parent)!r} or {str(check_path.parent)!r}"
)
log.error(log_message)
raise FileNotFoundError(log_message)
Expand All @@ -1370,9 +1371,13 @@ def suggest_path(
return {"suggested_path": check_path.relative_to(machine_config.rsync_basepath)}


@router.post("/{session_id}/make_rsyncer_destination")
def make_rsyncer_destination(session_id: int, destination: Path, db=murfey_db):
secure_path_parts = [secure_filename(p) for p in destination.parts]
class Dest(BaseModel):
destination: Path


@router.post("/sessions/{session_id}/make_rsyncer_destination")
def make_rsyncer_destination(session_id: int, destination: Dest, db=murfey_db):
secure_path_parts = [secure_filename(p) for p in destination.destintion.parts]
destination_path = "/".join(secure_path_parts)
instrument_name = (
db.exec(select(Session).where(Session.id == session_id)).one().instrument_name
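Note: wrapping `destination` in a `BaseModel` means FastAPI now reads it from the JSON request body rather than the query string, and the route gains a `/sessions` prefix. A sketch of the corresponding client call under those assumptions:

```python
import requests

# With `destination: Dest`, FastAPI expects a JSON body, not a query parameter.
requests.post(
    "http://murfey.example:8000/sessions/42/make_rsyncer_destination",
    json={"destination": "/dls/m02/data/2024/cm12345-1/raw"},
)
```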
@@ -1427,19 +1432,31 @@ def register_dc_group(
dcg_murfey[0].atlas = dcg_params.atlas
dcg_murfey[0].sample = dcg_params.sample
dcg_murfey[0].atlas_pixel_size = dcg_params.atlas_pixel_size

+ if _transport_object:
+     if dcg_murfey[0].atlas_id is not None:
+         _transport_object.send(
+             _transport_object.feedback_queue,
+             {
+                 "register": "atlas_update",
+                 "atlas_id": dcg_murfey[0].atlas_id,
+                 "atlas": dcg_params.atlas,
+                 "sample": dcg_params.sample,
+                 "atlas_pixel_size": dcg_params.atlas_pixel_size,
+             },
+         )
+     else:
+         atlas_id_response = _transport_object.do_insert_atlas(
+             Atlas(
+                 dataCollectionGroupId=dcg_murfey[0].id,
+                 atlasImage=dcg_params.atlas,
+                 pixelSize=dcg_params.atlas_pixel_size,
+                 cassetteSlot=dcg_params.sample,
+             )
+         )
+         dcg_murfey[0].atlas_id = atlas_id_response["return_value"]
db.add(dcg_murfey[0])
db.commit()
- if _transport_object:
-     _transport_object.send(
-         _transport_object.feedback_queue,
-         {
-             "register": "atlas_update",
-             "atlas_id": dcg_murfey.atlas_id,
-             "atlas": dcg_params.atlas,
-             "sample": dcg_params.sample,
-             "atlas_pixel_size": dcg_params.atlas_pixel_size,
-         },
-     )
else:
dcg_parameters = {
"start_time": str(datetime.datetime.now()),
@@ -1528,6 +1545,7 @@ def register_proc(
"session_id": session_id,
"experiment_type": proc_params.experiment_type,
"recipe": proc_params.recipe,
"source": proc_params.source,
"tag": proc_params.tag,
"job_parameters": {
k: v for k, v in proc_params.parameters.items() if v not in (None, "None")
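Note: `register_dc_group` now handles both atlas states: update an existing record through the feedback queue, or insert a fresh `Atlas` row and keep the returned ID. A condensed sketch of that branch; names follow the diff, and the transport behaviour is assumed:

```python
def sync_atlas(dcg, dcg_params, transport, atlas_record) -> None:
    if dcg.atlas_id is not None:
        # Atlas already registered: push an update through the feedback queue.
        transport.send(
            transport.feedback_queue,
            {
                "register": "atlas_update",
                "atlas_id": dcg.atlas_id,
                "atlas": dcg_params.atlas,
                "sample": dcg_params.sample,
                "atlas_pixel_size": dcg_params.atlas_pixel_size,
            },
        )
    else:
        # First sighting: insert a new row and remember its ID for later updates.
        dcg.atlas_id = transport.do_insert_atlas(atlas_record)["return_value"]
```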