From d28bbf1d121769f3d4c6952603873577bcb5ccf7 Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Fri, 17 Jan 2025 13:39:05 +0000 Subject: [PATCH 1/2] Remove the old SPA context --- src/murfey/client/analyser.py | 26 +----- src/murfey/client/contexts/spa.py | 105 +--------------------- src/murfey/client/instance_environment.py | 9 -- src/murfey/client/multigrid_control.py | 29 +----- src/murfey/client/tui/app.py | 29 +----- src/murfey/client/tui/screens.py | 32 ++----- src/murfey/server/api/__init__.py | 11 --- src/murfey/server/demo_api.py | 7 -- src/murfey/util/config.py | 1 - src/murfey/util/models.py | 4 - 10 files changed, 15 insertions(+), 238 deletions(-) diff --git a/src/murfey/client/analyser.py b/src/murfey/client/analyser.py index c18b06fe4..7910ca170 100644 --- a/src/murfey/client/analyser.py +++ b/src/murfey/client/analyser.py @@ -16,7 +16,7 @@ from murfey.client.context import Context from murfey.client.contexts.clem import CLEMContext -from murfey.client.contexts.spa import SPAContext, SPAModularContext +from murfey.client.contexts.spa import SPAModularContext from murfey.client.contexts.spa_metadata import SPAMetadataContext from murfey.client.contexts.tomo import TomographyContext from murfey.client.instance_environment import MurfeyInstanceEnvironment @@ -159,23 +159,7 @@ def _find_context(self, file_path: Path) -> bool: if split_file_name[0].startswith("FoilHole"): if not self._context: logger.info("Acquisition software: EPU") - if self._environment: - try: - cfg = get_machine_config_client( - str(self._environment.url.geturl()), - instrument_name=self._environment.instrument_name, - demo=self._environment.demo, - ) - except Exception as e: - logger.error(f"Exception encountered: {e}") - cfg = {} - else: - cfg = {} - self._context = ( - SPAModularContext("epu", self._basepath) - if cfg.get("modular_spa") - else SPAContext("epu", self._basepath) - ) + self._context = SPAModularContext("epu", self._basepath) self.parameters_model = ProcessingParametersSPA return True @@ -325,8 +309,7 @@ def _analyse(self): "form": dc_metadata, "dependencies": ( spa_form_dependencies - if isinstance(self._context, SPAContext) - or isinstance( + if isinstance( self._context, SPAModularContext ) else {} @@ -385,8 +368,7 @@ def _analyse(self): "form": dc_metadata, "dependencies": ( spa_form_dependencies - if isinstance(self._context, SPAContext) - or isinstance( + if isinstance( self._context, SPAModularContext ) else {} diff --git a/src/murfey/client/contexts/spa.py b/src/murfey/client/contexts/spa.py index 65cf34a35..3790f792e 100644 --- a/src/murfey/client/contexts/spa.py +++ b/src/murfey/client/contexts/spa.py @@ -285,7 +285,7 @@ def _get_xml_list_index(key: str, xml_list: list) -> int: raise ValueError(f"Key not found in XML list: {key}") -class _SPAContext(Context): +class SPAModularContext(Context): user_params = [ ProcessingParameter( "dose_per_frame", @@ -558,8 +558,6 @@ def gather_metadata( ) or True return metadata - -class SPAModularContext(_SPAContext): def _position_analysis( self, transferred_file: Path, @@ -856,104 +854,3 @@ def _launch_spa_pipeline( url: str = "", ): return - - -class SPAContext(_SPAContext): - def _register_data_collection( - self, - tag: str, - url: str, - data: dict, - environment: MurfeyInstanceEnvironment, - ): - logger.info(f"registering data collection with data {data}") - environment.id_tag_registry["data_collection"].append(tag) - image_directory = str(environment.default_destinations[Path(tag)]) - json = { - "voltage": data["voltage"], - 
"pixel_size_on_image": data["pixel_size_on_image"], - "experiment_type": data["experiment_type"], - "image_size_x": data["image_size_x"], - "image_size_y": data["image_size_y"], - "file_extension": data["file_extension"], - "acquisition_software": data["acquisition_software"], - "image_directory": image_directory, - "tag": tag, - "source": tag, - "magnification": data["magnification"], - "total_exposed_dose": data.get("total_exposed_dose"), - "c2aperture": data.get("c2aperture"), - "exposure_time": data.get("exposure_time"), - "slit_width": data.get("slit_width"), - "phase_plate": data.get("phase_plate", False), - } - capture_post(url, json=json) - - def post_transfer( - self, - transferred_file: Path, - environment: MurfeyInstanceEnvironment | None = None, - **kwargs, - ) -> bool: - return True - - def _register_processing_job( - self, - tag: str, - environment: MurfeyInstanceEnvironment, - parameters: Dict[str, Any] | None = None, - ): - logger.info(f"registering processing job with parameters: {parameters}") - parameters = parameters or {} - environment.id_tag_registry["processing_job"].append(tag) - proc_url = f"{str(environment.url.geturl())}/visits/{environment.visit}/{environment.murfey_session}/register_processing_job" - machine_config = get_machine_config_client( - str(environment.url.geturl()), - instrument_name=environment.instrument_name, - demo=environment.demo, - ) - image_directory = str( - Path(machine_config.get("rsync_basepath", ".")) - / environment.default_destinations[Path(tag)] - ) - if self._acquisition_software == "epu": - import_images = f"{Path(image_directory).resolve()}/GridSquare*/Data/*{parameters['file_extension']}" - else: - import_images = ( - f"{Path(image_directory).resolve()}/*{parameters['file_extension']}" - ) - msg: Dict[str, Any] = { - "tag": tag, - "source": tag, - "recipe": "ispyb-relion", - "parameters": { - "acquisition_software": parameters["acquisition_software"], - "voltage": parameters["voltage"], - "gain_ref": parameters["gain_ref"], - "dose_per_frame": parameters["dose_per_frame"], - "eer_grouping": parameters["eer_fractionation"], - "import_images": import_images, - "angpix": float(parameters["pixel_size_on_image"]) * 1e10, - "symmetry": parameters["symmetry"], - "boxsize": parameters["boxsize"], - "downscale": parameters["downscale"], - "small_boxsize": parameters["small_boxsize"], - "mask_diameter": parameters["mask_diameter"], - "use_cryolo": parameters["use_cryolo"], - "estimate_particle_diameter": parameters["estimate_particle_diameter"], - }, - } - if parameters["particle_diameter"]: - msg["parameters"]["particle_diameter"] = parameters["particle_diameter"] - capture_post(proc_url, json=msg) - - def _launch_spa_pipeline( - self, - tag: str, - jobid: int, - environment: MurfeyInstanceEnvironment, - url: str = "", - ): - environment.id_tag_registry["auto_proc_program"].append(tag) - data = {"job_id": jobid} - capture_post(url, json=data) diff --git a/src/murfey/client/instance_environment.py b/src/murfey/client/instance_environment.py index 95af5074c..db83581b7 100644 --- a/src/murfey/client/instance_environment.py +++ b/src/murfey/client/instance_environment.py @@ -86,15 +86,6 @@ def dc_callback(cls, v, values): l(k) return v - @validator("processing_job_ids") - def job_callback(cls, v, values): - with global_env_lock: - for l in values.get("listeners", {}).get("processing_job_ids", []): - for k in v.keys(): - if k not in values["id_tag_registry"]["auto_proc_program"]: - l(k, v[k]["ispyb-relion"]) - return v - 
@validator("autoproc_program_ids") def app_callback(cls, v, values): # logger.info(f"autoproc program ids validator: {v}") diff --git a/src/murfey/client/multigrid_control.py b/src/murfey/client/multigrid_control.py index 2d590956b..f64796827 100644 --- a/src/murfey/client/multigrid_control.py +++ b/src/murfey/client/multigrid_control.py @@ -13,7 +13,7 @@ import murfey.client.websocket from murfey.client.analyser import Analyser -from murfey.client.contexts.spa import SPAContext, SPAModularContext +from murfey.client.contexts.spa import SPAModularContext from murfey.client.contexts.tomo import TomographyContext from murfey.client.instance_environment import MurfeyInstanceEnvironment from murfey.client.rsync import RSyncer, RSyncerUpdate, TransferResult @@ -443,7 +443,7 @@ def _start_dc(self, json, from_form: bool = False): ) log.info("tomography processing flushed") - elif isinstance(context, SPAContext) or isinstance(context, SPAModularContext): + elif isinstance(context, SPAModularContext): url = f"{str(self._environment.url.geturl())}/visits/{str(self._environment.visit)}/{self.session_id}/register_data_collection_group" dcg_data = { "experiment_type": "single particle", @@ -515,31 +515,6 @@ def _start_dc(self, json, from_form: bool = False): f"{self._environment.url.geturl()}/visits/{self._environment.visit}/{self.session_id}/flush_spa_processing", json={"tag": str(source)}, ) - if isinstance(context, SPAContext): - url = f"{str(self._environment.url.geturl())}/visits/{str(self._environment.visit)}/{self.session_id}/start_data_collection" - self._environment.listeners["data_collection_group_ids"] = { - partial( - context._register_data_collection, - url=url, - data=json, - environment=self._environment, - ) - } - self._environment.listeners["data_collection_ids"] = { - partial( - context._register_processing_job, - parameters=json, - environment=self._environment, - ) - } - url = f"{str(self._environment.url.geturl())}/visits/{str(self._environment.visit)}/spa_processing" - self._environment.listeners["processing_job_ids"] = { - partial( - context._launch_spa_pipeline, - url=url, - environment=self._environment, - ) - } def _increment_file_count( self, observed_files: List[Path], source: str, destination: str diff --git a/src/murfey/client/tui/app.py b/src/murfey/client/tui/app.py index 9e3b3fdd9..8b43cc5c7 100644 --- a/src/murfey/client/tui/app.py +++ b/src/murfey/client/tui/app.py @@ -15,7 +15,7 @@ from textual.widgets import Button, Input from murfey.client.analyser import Analyser -from murfey.client.contexts.spa import SPAContext, SPAModularContext +from murfey.client.contexts.spa import SPAModularContext from murfey.client.contexts.tomo import TomographyContext from murfey.client.instance_environment import MurfeyInstanceEnvironment from murfey.client.rsync import RSyncer, RSyncerUpdate, TransferResult @@ -552,7 +552,7 @@ def _start_dc(self, json, from_form: bool = False): json={"rsync_source": str(source)}, ) log.info("tomography processing flushed") - elif isinstance(context, SPAContext) or isinstance(context, SPAModularContext): + elif isinstance(context, SPAModularContext): url = f"{str(self._url.geturl())}/visits/{str(self._visit)}/{self._environment.murfey_session}/register_data_collection_group" dcg_data = { "experiment_type": "single particle", @@ -629,31 +629,6 @@ def _start_dc(self, json, from_form: bool = False): f"{self.app._environment.url.geturl()}/visits/{self.app._environment.visit}/{self.app._environment.murfey_session}/flush_spa_processing", json={"tag": 
str(source)}, ) - if isinstance(context, SPAContext): - url = f"{str(self._url.geturl())}/visits/{str(self._visit)}/{self._environment.murfey_session}/start_data_collection" - self._environment.listeners["data_collection_group_ids"] = { - partial( - context._register_data_collection, - url=url, - data=json, - environment=self._environment, - ) - } - self._environment.listeners["data_collection_ids"] = { - partial( - context._register_processing_job, - parameters=json, - environment=self._environment, - ) - } - url = f"{str(self._url.geturl())}/visits/{str(self._visit)}/spa_processing" - self._environment.listeners["processing_job_ids"] = { - partial( - context._launch_spa_pipeline, - url=url, - environment=self._environment, - ) - } def _set_request_destination(self, response: str): if response == "y": diff --git a/src/murfey/client/tui/screens.py b/src/murfey/client/tui/screens.py index 5a79650a5..2d278bb88 100644 --- a/src/murfey/client/tui/screens.py +++ b/src/murfey/client/tui/screens.py @@ -47,7 +47,7 @@ from werkzeug.utils import secure_filename from murfey.client.analyser import Analyser, spa_form_dependencies -from murfey.client.contexts.spa import SPAContext, SPAModularContext +from murfey.client.contexts.spa import SPAModularContext from murfey.client.contexts.tomo import TomographyContext from murfey.client.gain_ref import determine_gain_ref from murfey.client.instance_environment import ( @@ -257,18 +257,8 @@ def __init__( super().__init__(*args, **kwargs) self._selected_dir = basepath self._add_basepath = add_basepath - cfg = get_machine_config_client( - str(self.app._environment.url.geturl()), - instrument_name=self.app._environment.instrument_name, - demo=self.app._environment.demo, - ) - self._context: ( - Type[SPAModularContext] | Type[SPAContext] | Type[TomographyContext] - ) - if cfg.get("modular_spa"): - self._context = SPAContext - else: - self._context = SPAModularContext + self._context: Type[SPAModularContext] | Type[TomographyContext] + self._context = SPAModularContext def compose(self): machine_data = requests.get( @@ -975,7 +965,7 @@ class DestinationSelect(Screen): def __init__( self, transfer_routes: Dict[Path, str], - context: Type[SPAContext] | Type[SPAModularContext] | Type[TomographyContext], + context: Type[SPAModularContext] | Type[TomographyContext], *args, dependencies: Dict[str, FormDependency] | None = None, destination_overrides: Optional[Dict[Path, str]] = None, @@ -994,9 +984,7 @@ def __init__( def compose(self): bulk = [] with RadioSet(): - yield RadioButton( - "SPA", value=self._context in (SPAContext, SPAModularContext) - ) + yield RadioButton("SPA", value=self._context is SPAModularContext) yield RadioButton("Tomography", value=self._context is TomographyContext) if self.app._multigrid: machine_config = get_machine_config_client( @@ -1142,15 +1130,7 @@ def on_switch_changed(self, event): def on_radio_set_changed(self, event: RadioSet.Changed) -> None: if event.index == 0: - cfg = get_machine_config_client( - str(self.app._environment.url.geturl()), - instrument_name=self.app._environment.instrument_name, - demo=self.app._environment.demo, - ) - if cfg.get("modular_spa"): - self._context = SPAContext - else: - self._context = SPAModularContext + self._context = SPAModularContext else: self._context = TomographyContext self.app.pop_screen() diff --git a/src/murfey/server/api/__init__.py b/src/murfey/server/api/__init__.py index d6232f9ac..a34ea7a8b 100644 --- a/src/murfey/server/api/__init__.py +++ b/src/murfey/server/api/__init__.py @@ -98,7 
+98,6 @@ Sample, SessionInfo, SPAProcessFile, - SPAProcessingParameters, SuggestedPathParameters, TiltInfo, TiltSeriesGroupInfo, @@ -1058,16 +1057,6 @@ async def send_murfey_message(instrument_name: str, msg: RegistrationMessage): ) -@router.post("/visits/{visit_name}/spa_processing") -async def request_spa_processing(visit_name: str, proc_params: SPAProcessingParameters): - zocalo_message = { - "parameters": {"ispyb_process": proc_params.job_id}, - "recipes": ["ispyb-relion"], - } - if _transport_object: - _transport_object.send("processing_recipe", zocalo_message) - - @router.post("/visits/{visit_name}/{session_id}/spa_preprocess") async def request_spa_preprocessing( visit_name: str, diff --git a/src/murfey/server/demo_api.py b/src/murfey/server/demo_api.py index 5439d53c2..cea1fcdfd 100644 --- a/src/murfey/server/demo_api.py +++ b/src/murfey/server/demo_api.py @@ -83,7 +83,6 @@ RsyncerSource, SessionInfo, SPAProcessFile, - SPAProcessingParameters, SuggestedPathParameters, TiltInfo, TiltSeriesGroupInfo, @@ -926,12 +925,6 @@ async def send_murfey_message(msg: RegistrationMessage): pass -@router.post("/visits/{visit_name}/spa_processing") -async def request_spa_processing(visit_name: str, proc_params: SPAProcessingParameters): - log.info("SPA processing requested") - return proc_params - - class Tag(BaseModel): tag: str diff --git a/src/murfey/util/config.py b/src/murfey/util/config.py index 00750c806..ca8c65ed3 100644 --- a/src/murfey/util/config.py +++ b/src/murfey/util/config.py @@ -36,7 +36,6 @@ class MachineConfig(BaseModel, extra=Extra.allow): # type: ignore camera: str = "FALCON" data_required_substrings: Dict[str, Dict[str, List[str]]] = {} allow_removal: bool = False - modular_spa: bool = False data_transfer_enabled: bool = True processing_enabled: bool = True machine_override: str = "" diff --git a/src/murfey/util/models.py b/src/murfey/util/models.py index e8dd8cac3..62b398c80 100644 --- a/src/murfey/util/models.py +++ b/src/murfey/util/models.py @@ -190,10 +190,6 @@ class MillingParameters(BaseModel): """ -class SPAProcessingParameters(BaseModel): - job_id: int - - class SPAProcessFile(BaseModel): tag: str path: str From 11977b495fe34e9844afcdb6cb1266ecbaaee9e9 Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Fri, 17 Jan 2025 14:54:10 +0000 Subject: [PATCH 2/2] Pretty sure none of this is used --- src/murfey/client/contexts/spa.py | 1 - src/murfey/client/contexts/tomo.py | 91 ----------------------- src/murfey/client/instance_environment.py | 46 +----------- src/murfey/client/multigrid_control.py | 3 - src/murfey/util/models.py | 3 - 5 files changed, 2 insertions(+), 142 deletions(-) diff --git a/src/murfey/client/contexts/spa.py b/src/murfey/client/contexts/spa.py index 3790f792e..154a4c848 100644 --- a/src/murfey/client/contexts/spa.py +++ b/src/murfey/client/contexts/spa.py @@ -326,7 +326,6 @@ def __init__(self, acquisition_software: str, basepath: Path): super().__init__("SPA", acquisition_software) self._basepath = basepath self._processing_job_stash: dict = {} - self._preprocessing_triggers: dict = {} self._foil_holes: Dict[int, List[int]] = {} def gather_metadata( diff --git a/src/murfey/client/contexts/tomo.py b/src/murfey/client/contexts/tomo.py index f09dad951..da9ea9b9c 100644 --- a/src/murfey/client/contexts/tomo.py +++ b/src/murfey/client/contexts/tomo.py @@ -8,7 +8,6 @@ import requests import xmltodict -from pydantic import BaseModel import murfey.util.eer from murfey.client.context import Context, ProcessingParameter @@ -17,7 +16,6 @@ MovieTracker, MurfeyID, 
MurfeyInstanceEnvironment, - global_env_lock, ) from murfey.util import authorised_requests, capture_post, get_machine_config_client from murfey.util.mdoc import get_block, get_global_data, get_num_blocks @@ -65,15 +63,6 @@ def _construct_tilt_series_name(file_path: Path) -> str: return "_".join(split_name[:-5]) -class ProcessFileIncomplete(BaseModel): - dest: Path - source: Path - image_number: int - mc_uuid: int - tag: str - description: str = "" - - class TomographyContext(Context): user_params = [ ProcessingParameter( @@ -101,7 +90,6 @@ def __init__(self, acquisition_software: str, basepath: Path): self._aligned_tilt_series: List[str] = [] self._data_collection_stash: list = [] self._processing_job_stash: dict = {} - self._preprocessing_triggers: dict = {} self._lock: RLock = RLock() def _flush_data_collections(self): @@ -120,12 +108,6 @@ def _flush_data_collections(self): capture_post(dc_data[0], json=data) self._data_collection_stash = [] - def _flush_processing_job(self, tag: str): - if proc_data := self._processing_job_stash.get(tag): - for pd in proc_data: - requests.post(pd[0], json=pd[1]) - self._processing_job_stash.pop(tag) - def _flush_processing_jobs(self): logger.info( f"Flushing {len(self._processing_job_stash.keys())} processing job API calls" @@ -135,75 +117,6 @@ def _flush_processing_jobs(self): requests.post(pd[0], json=pd[1]) self._processing_job_stash = {} - def _flush_preprocess(self, tag: str, app_id: int): - if tag_tr := self._preprocessing_triggers.get(tag): - for tr in tag_tr: - process_file = self._complete_process_file(tr[1], tr[2], app_id) - if process_file: - capture_post(tr[0], json=process_file) - self._preprocessing_triggers.pop(tag) - - def _complete_process_file( - self, - incomplete_process_file: ProcessFileIncomplete, - environment: MurfeyInstanceEnvironment, - app_id: int, - ) -> dict: - try: - with global_env_lock: - tag = incomplete_process_file.tag - - eer_fractionation_file = None - if environment.data_collection_parameters.get("num_eer_frames"): - response = requests.post( - f"{str(environment.url.geturl())}/visits/{environment.visit}/{environment.murfey_session}/eer_fractionation_file", - json={ - "num_frames": environment.data_collection_parameters[ - "num_eer_frames" - ], - "fractionation": environment.data_collection_parameters[ - "eer_fractionation" - ], - "dose_per_frame": environment.data_collection_parameters[ - "dose_per_frame" - ], - "fractionation_file_name": "eer_fractionation_tomo.txt", - }, - ) - eer_fractionation_file = response.json()["eer_fractionation_file"] - - new_dict = { - "path": str(incomplete_process_file.dest), - "description": incomplete_process_file.description, - "size": incomplete_process_file.source.stat().st_size, - "timestamp": incomplete_process_file.source.stat().st_ctime, - "processing_job": environment.processing_job_ids[tag][ - "em-tomo-preprocess" - ], - "data_collection_id": environment.data_collection_ids[tag], - "image_number": incomplete_process_file.image_number, - "pixel_size": environment.data_collection_parameters[ - "pixel_size_on_image" - ], - "autoproc_program_id": app_id, - "mc_uuid": incomplete_process_file.mc_uuid, - "dose_per_frame": environment.data_collection_parameters.get( - "dose_per_frame" - ), - "mc_binning": environment.data_collection_parameters.get( - "motion_corr_binning", 1 - ), - "gain_ref": environment.data_collection_parameters.get("gain_ref"), - "voltage": environment.data_collection_parameters.get( - "voltage", 300 - ), - "eer_fractionation_file": eer_fractionation_file, - } 
- return new_dict - except KeyError: - logger.warning("Key error encountered in _complete_process_file") - return {} - def _file_transferred_to( self, environment: MurfeyInstanceEnvironment, source: Path, file_path: Path ): @@ -441,14 +354,10 @@ def _add_tilt( preproc_data = { "path": str(file_transferred_to), "description": "", - "data_collection_id": environment.data_collection_ids.get(tilt_series), "image_number": environment.movies[file_transferred_to].movie_number, "pixel_size": environment.data_collection_parameters.get( "pixel_size_on_image", 0 ), - "autoproc_program_id": environment.autoproc_program_ids.get( - tilt_series, {} - ).get("em-tomo-preprocess"), "dose_per_frame": environment.data_collection_parameters.get( "dose_per_frame", 0 ), diff --git a/src/murfey/client/instance_environment.py b/src/murfey/client/instance_environment.py index db83581b7..a677a20ac 100644 --- a/src/murfey/client/instance_environment.py +++ b/src/murfey/client/instance_environment.py @@ -5,10 +5,10 @@ from itertools import count from pathlib import Path from threading import RLock -from typing import Callable, Dict, List, NamedTuple, Optional, Set +from typing import Dict, List, NamedTuple, Optional from urllib.parse import ParseResult -from pydantic import BaseModel, validator +from pydantic import BaseModel from murfey.client.watchdir import DirWatcher @@ -42,18 +42,8 @@ class MurfeyInstanceEnvironment(BaseModel): watchers: Dict[Path, DirWatcher] = {} demo: bool = False data_collection_group_ids: Dict[str, int] = {} - data_collection_ids: Dict[str, int] = {} - processing_job_ids: Dict[str, Dict[str, int]] = {} - autoproc_program_ids: Dict[str, Dict[str, int]] = {} - id_tag_registry: Dict[str, List[str]] = { - "data_collection_group": [], - "data_collection": [], - "processing_job": [], - "auto_proc_program": [], - } data_collection_parameters: dict = {} movies: Dict[Path, MovieTracker] = {} - listeners: Dict[str, Set[Callable]] = {} movie_tilt_pair: Dict[Path, str] = {} tilt_angles: Dict[str, List[List[str]]] = {} movie_counters: Dict[str, itertools.count] = {} @@ -68,34 +58,6 @@ class Config: validate_assignment: bool = True arbitrary_types_allowed: bool = True - @validator("data_collection_group_ids") - def dcg_callback(cls, v, values): - with global_env_lock: - for l in values.get("listeners", {}).get("data_collection_group_ids", []): - for k in v.keys(): - if k not in values["id_tag_registry"]["data_collection"]: - l(k) - return v - - @validator("data_collection_ids") - def dc_callback(cls, v, values): - with global_env_lock: - for l in values.get("listeners", {}).get("data_collection_ids", []): - for k in v.keys(): - if k not in values["id_tag_registry"]["processing_job"]: - l(k) - return v - - @validator("autoproc_program_ids") - def app_callback(cls, v, values): - # logger.info(f"autoproc program ids validator: {v}") - with global_env_lock: - for l in values.get("listeners", {}).get("autoproc_program_ids", []): - for k in v.keys(): - if v[k].get("em-tomo-preprocess"): - l(k, v[k]["em-tomo-preprocess"]) - return v - def clear(self): self.sources = [] self.default_destinations = {} @@ -103,12 +65,8 @@ def clear(self): w.stop() self.watchers = {} self.data_collection_group_ids = {} - self.data_collection_ids = {} - self.processing_job_ids = {} - self.autoproc_program_ids = {} self.data_collection_parameters = {} self.movies = {} - self.listeners = {} self.movie_tilt_pair = {} self.tilt_angles = {} self.visit = "" diff --git a/src/murfey/client/multigrid_control.py 
b/src/murfey/client/multigrid_control.py index f64796827..5df495814 100644 --- a/src/murfey/client/multigrid_control.py +++ b/src/murfey/client/multigrid_control.py @@ -370,9 +370,6 @@ def _start_dc(self, json, from_form: bool = False): source = Path(json["source"]) - self._environment.id_tag_registry["data_collection_group"].append( - str(source) - ) url = f"{str(self._environment.url.geturl())}/visits/{str(self._environment.visit)}/{self.session_id}/register_data_collection_group" dcg_data = { "experiment_type": "tomo", diff --git a/src/murfey/util/models.py b/src/murfey/util/models.py index 62b398c80..5421641aa 100644 --- a/src/murfey/util/models.py +++ b/src/murfey/util/models.py @@ -312,12 +312,9 @@ class ProcessFile(BaseModel): # Rename to TomoProcessFile path: str description: str tag: str - data_collection_id: Optional[int] image_number: int pixel_size: float dose_per_frame: float - processing_job: Optional[int] = None - autoproc_program_id: Optional[int] = None mc_uuid: Optional[int] = None voltage: float = 300 mc_binning: int = 1
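
Reviewer note (appended below the series; not part of either patch): after this series, EPU context selection in murfey/client/analyser.py constructs SPAModularContext unconditionally instead of consulting the removed modular_spa machine-config flag, and the series also drops the listener/id-registry machinery from MurfeyInstanceEnvironment, the /visits/{visit_name}/spa_processing endpoint, and the SPAProcessingParameters model. A minimal sketch of the post-series call site follows; the helper name make_epu_spa_context is hypothetical, but the import path and the SPAModularContext("epu", basepath) constructor signature are taken from the diff.

    from pathlib import Path

    from murfey.client.contexts.spa import SPAModularContext


    def make_epu_spa_context(basepath: Path) -> SPAModularContext:
        """Build the SPA context for EPU data after this series is applied.

        SPAModularContext is the only SPA context left in the client, so no
        get_machine_config_client() lookup is needed to choose between
        implementations.
        """
        return SPAModularContext("epu", basepath)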