diff --git a/src/murfey/client/contexts/fib.py b/src/murfey/client/contexts/fib.py index f90f17c0..305b22f8 100644 --- a/src/murfey/client/contexts/fib.py +++ b/src/murfey/client/contexts/fib.py @@ -2,29 +2,33 @@ import logging import re +import shutil import threading +import time +import xml.etree.ElementTree as ET +from dataclasses import dataclass from datetime import datetime from pathlib import Path -from typing import NamedTuple - -import xmltodict +from typing import Callable, Type, TypeVar from murfey.client.context import Context from murfey.client.instance_environment import MurfeyInstanceEnvironment from murfey.util.client import capture_post +from murfey.util.models import ( + LamellaSiteInfo, + MillingStepInfo, + MillingSteps, + StagePositionInfo, + StagePositionValues, +) logger = logging.getLogger("murfey.client.contexts.fib") lock = threading.Lock() -class Lamella(NamedTuple): - name: str - number: int - angle: float | None = None - - -class MillingProgress(NamedTuple): +@dataclass +class MillingImage: file: Path timestamp: float @@ -45,6 +49,164 @@ def _number_from_name(name: str) -> int: ) +T = TypeVar("T") + + +def _parse_xml_text( + node: ET.Element, + path: str, + func: Callable[[str], T] | Type, +) -> T | None: + """ + Searches the XML Element using the provided path. If a matching node is found, + and it has a text attribute, processes the text using the provided function. + Otherwise, returns None. 
+    """
+    if (match := node.find(path)) is None or (text := match.text) is None:
+        return None
+    try:
+        return func(text)
+    except (ValueError, TypeError):
+        logger.error(f"Error parsing XML text {text} at path {path}", exc_info=True)
+        return None
+
+
+SI_UNITS_KEY = {
+    # Length
+    "mm": 1e-3,
+    "um": 1e-6,
+    "μm": 1e-6,
+    "nm": 1e-9,
+    # Current
+    "mA": 1e-3,
+    "uA": 1e-6,
+    "μA": 1e-6,
+    "nA": 1e-9,
+    "pA": 1e-12,
+    # Voltage
+    "kV": 1e3,
+    "mV": 1e-3,
+    # Time
+    "ms": 1e-3,
+    "us": 1e-6,
+    "μs": 1e-6,
+    # Miscellaneous
+    "%": 0.01,
+}
+
+
+def _parse_measurement(text: str):
+    """
+    The measurements in the ProjectData.dat file are stored in a human-readable format
+    as strings. This helper function converts them into their base SI unit and returns
+    the value as a float.
+
+    E.g. 5 um will be parsed as 0.000005
+    """
+    try:
+        value, unit = (s.strip() for s in text.split(" ", 1))
+        return float(value) * SI_UNITS_KEY.get(unit, 1)
+    except ValueError:
+        logger.warning(f"Could not parse {text} as a measurement")
+        return None
+
+
+def _parse_boolean(text: str):
+    """
+    Parses the XML element's text field and returns it as a Python boolean
+    """
+    if text.strip().lower() in ("true", "t", "1"):
+        return True
+    elif text.strip().lower() in ("false", "f", "0"):
+        return False
+    else:
+        logger.warning(f"Could not parse {text} as a boolean")
+        return None
+
+
+MILLING_STEP_NAMES = {
+    # Map unique activity name to class attribute
+    # Preparation stage
+    "Preparation - Eucentric Tilt": "eucentric_tilt",
+    "Preparation - Artificial Features": "artificial_features",
+    "Preparation - Milling Angle": "milling_angle",
+    "Preparation - Image Acquisition": "image_acquisition",
+    "Preparation - Lamella Placement": "lamella_placement",
+    # Milling stage
+    "Milling - Delay": "delay_1",
+    "Milling - Reference Definition": "reference_definition",
+    "Milling - Electron Reference Definition": "reference_definition_electron",
+    "Milling - Stress Relief Cuts": "stress_relief_cuts",
+    
"Milling - Reference Redefinition 1": "reference_redefinition_1", + "Milling - Rough Milling": "rough_milling", + "Milling - Rough Milling - Electron Image": "rough_milling_electron", + "Milling - Reference Redefinition 2": "reference_redefinition_2", + "Milling - Medium Milling": "medium_milling", + "Milling - Medium Milling - Electron Image": "medium_milling_electron", + "Milling - Fine Milling": "fine_milling", + "Milling - Fine Milling - Electron Image": "fine_milling_electron", + "Milling - Finer Milling": "finer_milling", + "Milling - Finer Milling - Electron Image": "finer_milling_electron", + # Thinning stage + "Thinning - Delay": "delay_2", + "Thinning - Polishing 1": "polishing_1", + "Thinning - Polishing 1 - Electron Image": "polishing_1_electron", + "Thinning - Polishing 2": "polishing_2", + "Thinning - Polishing 2 - Ion Image": "polishing_2_ion", + "Thinning - Polishing 2 - Electron Image": "polishing_2_electron", +} + + +STAGE_POSITION_VALUES = { + # Map class attribute to element name + # Paths are relative to the "StagePosition" node + "x": "X", + "y": "Y", + "z": "Z", + "rotation": "R", + "tilt_alpha": "AT", +} + + +STAGE_POSITION_NAMES = { + # Map class attribute to element name + # Paths are relative to the "Site" node + "preparation": "PreparationSiteLocation/StagePosition/StagePosition", + "chunk_coincidence": "Parameters/ChunkCoincidenceStagePosition/StagePosition", + "chunk": "ChunkSiteLocation/StagePosition/StagePosition", + "thinning_1": "Parameters/ThinningStagePosition/StagePosition", + "thinning_2": "ThinningSiteLocation/StagePosition/StagePosition", +} + + +ACTIVITY_FIELD_MAP = ( + # Model field name | Path relative to "Activity" | Function to apply + # These are relative to the "Activity" node + # Common parameters + ("is_enabled", "IsEnabled", _parse_boolean), + ("status", "ActivityMetadata/ExecutionResult", str), + ("execution_time", "ExecutionTime", _parse_measurement), + # Milling/Imaging beam parameters + ("site_location_type", 
"SiteLocationType", str), + ("beam_type", "MillingPreset/BeamType", str), + ("beam_type", "BeamPreset/BeamType", str), + ("voltage", "MillingPreset/HighVoltage", _parse_measurement), + ("voltage", "BeamPreset/HighVoltage", _parse_measurement), + ("current", "MillingPreset/BeamCurrent", _parse_measurement), + ("current", "BeamPreset/BeamCurrent", _parse_measurement), + # Milling parameters + ("depth_correction", "DepthCorrection", float), + ("milling_angle", "MillingAngle", _parse_measurement), + ("lamella_offset", "OffsetFromLamella", _parse_measurement), + ("trench_height_front", "FrontTrenchHeight", _parse_measurement), + ("trench_height_rear", "RearTrenchHeight", _parse_measurement), + ("width_overlap_front_left", "LamellaFrontLeftWidthOverlap", _parse_measurement), + ("width_overlap_front_right", "LamellaFrontRightWidthOverlap", _parse_measurement), + ("width_overlap_rear_left", "LamellaRearLeftWidthOverlap", _parse_measurement), + ("width_overlap_rear_right", "LamellaRearRightWidthOverlap", _parse_measurement), +) + + def _get_source(file_path: Path, environment: MurfeyInstanceEnvironment) -> Path | None: """ Returns the Path of the file on the client PC. 
@@ -84,8 +246,8 @@ def __init__( super().__init__("FIBContext", acquisition_software, token) self._basepath = basepath self._machine_config = machine_config - self._milling: dict[int, list[MillingProgress]] = {} - self._lamellae: dict[int, Lamella] = {} + self._site_info: dict[int, LamellaSiteInfo] = {} + self._drift_correction_images: dict[int, list[MillingImage]] = {} def post_transfer( self, @@ -103,7 +265,45 @@ def post_transfer( # ----------------------------------------------------------------------------- if self._acquisition_software == "autotem": parts = transferred_file.parts - if "DCImages" in parts and transferred_file.suffix == ".png": + if transferred_file.name == "ProjectData.dat": + logger.info(f"Found metadata file {transferred_file} for parsing") + + # Create a backup copy of the file + backup_file = ( + transferred_file.parent + / f"{transferred_file.stem}-{time.time_ns()}{transferred_file.suffix}" + ) + shutil.copyfile( + transferred_file, + backup_file, + ) + logger.info(f"Saved snapshot of the metadata file as {backup_file}") + + # Parse the metadata file + all_site_info_new = self._parse_autotem_metadata(transferred_file) + for site_num, site_info_new in all_site_info_new.items(): + # Post the data to the backend if it's been changed + if ( + data := site_info_new.model_dump(exclude_none=True) + ) != self._site_info.get(site_num, LamellaSiteInfo()).model_dump( + exclude_none=True + ): + capture_post( + base_url=str(environment.url.geturl()), + router_name="workflow_fib.router", + function_name="register_fib_milling_progress", + token=self._token, + instrument_name=environment.instrument_name, + data=data, + # Endpoint kwargs + session_id=environment.murfey_session, + ) + + # Update existing dict + self._site_info[site_num] = site_info_new + logger.info(f"Updating metadata for site {site_num}") + + elif "DCImages" in parts and transferred_file.suffix == ".png": lamella_name = parts[parts.index("Sites") + 1] lamella_number = 
_number_from_name(lamella_name) time_from_name = transferred_file.name.split("-")[:6] @@ -117,11 +317,6 @@ def post_transfer( second=int(time_from_name[5]), ) ) - if not self._lamellae.get(lamella_number): - self._lamellae[lamella_number] = Lamella( - name=lamella_name, - number=lamella_number, - ) if not (source := _get_source(transferred_file, environment)): logger.warning(f"No source found for file {transferred_file}") return @@ -139,16 +334,16 @@ def post_transfer( f"File {transferred_file.name!r} not found on storage system" ) return - if not self._milling.get(lamella_number): - self._milling[lamella_number] = [ - MillingProgress( + if not self._drift_correction_images.get(lamella_number): + self._drift_correction_images[lamella_number] = [ + MillingImage( timestamp=timestamp, file=destination_file, ) ] else: - self._milling[lamella_number].append( - MillingProgress( + self._drift_correction_images[lamella_number].append( + MillingImage( timestamp=timestamp, file=destination_file, ) @@ -156,7 +351,8 @@ def post_transfer( gif_list = [ l.file for l in sorted( - self._milling[lamella_number], key=lambda x: x.timestamp + self._drift_correction_images[lamella_number], + key=lambda x: x.timestamp, ) ] raw_directory = Path( @@ -169,33 +365,17 @@ def post_transfer( function_name="make_gif", token=self._token, instrument_name=environment.instrument_name, - year=datetime.now().year, - visit_name=environment.visit, - session_id=environment.murfey_session, data={ "lamella_number": lamella_number, "images": [str(file) for file in gif_list], "raw_directory": raw_directory, }, + # Endpoint kwargs + year=datetime.now().year, + visit_name=environment.visit, + session_id=environment.murfey_session, ) - elif transferred_file.name == "ProjectData.dat": - with open(transferred_file, "r") as dat: - try: - for_parsing = dat.read() - except Exception: - logger.warning(f"Failed to parse file {transferred_file}") - return - metadata = xmltodict.parse(for_parsing) - sites = 
metadata["AutoTEM"]["Project"]["Sites"]["Site"] - for site in sites: - number = _number_from_name(site["Name"]) - milling_angle = site["Workflow"]["Recipe"][0]["Activities"][ - "MillingAngleActivity" - ].get("MillingAngle") - if self._lamellae.get(number) and milling_angle: - self._lamellae[number]._replace( - angle=float(milling_angle.split(" ")[0]) - ) + # ----------------------------------------------------------------------------- # Maps # ----------------------------------------------------------------------------- @@ -233,6 +413,110 @@ def post_transfer( elif self._acquisition_software == "meteor": pass + def _parse_autotem_metadata(self, file: Path): + """ + Helper function to parse the 'ProjectData.dat' file produced by the AutoTEM. + This file contains metadata information on the milling sites set by the user, + along with the configured milling steps and their completion status. + """ + + try: + root = ET.parse(file).getroot() + except Exception: + logger.warning(f"Error parsing file {str(file)}", exc_info=True) + return None + + # Get the project name + if (project_name := _parse_xml_text(root, ".//Project/Name", str)) is None: + logger.warning("Metadata file has no project name") + return None + + # Find all the Site nodes + if not (sites := root.findall(".//Sites/Site")): + logger.warning(f"No site information found in {str(file)}") + return None + + # Iterate through Site nodes + all_site_info: dict[int, LamellaSiteInfo] = {} + for site in sites: + # Extract site name and number + if (site_name := _parse_xml_text(site, "Name", str)) is None: + logger.warning("Current site doesn't have a name") + continue + site_num = _number_from_name(site_name) + site_info = LamellaSiteInfo( + project_name=project_name, + site_name=site_name, + site_number=site_num, + steps=MillingSteps(), + ) + + # Extract stage position information for all known stages in current site + site_info.stage_info = StagePositionInfo() + for stage_name, stage_path in 
STAGE_POSITION_NAMES.items():
+                if (stage := site.find(stage_path)) is not None:
+                    stage_values = StagePositionValues()
+                    for value_name, value_path in STAGE_POSITION_VALUES.items():
+                        if (
+                            value := _parse_xml_text(
+                                stage, value_path, _parse_measurement
+                            )
+                        ) is not None:
+                            stage_values.__setattr__(value_name, value)
+                    site_info.stage_info.__setattr__(stage_name, stage_values)
+
+            # Find all Recipe nodes for the Site
+            if not (recipes := site.findall("Workflow/Recipe")):
+                # Early skip if no recipes are found
+                logger.warning(f"No recipes found for site {site_name}")
+                continue
+
+            # Iterate through the recipes configured for this site
+            for recipe in recipes:
+                if (recipe_name := _parse_xml_text(recipe, "Name", str)) is None:
+                    # Early skip if the Recipe has no Name
+                    logger.warning("Recipe doesn't have a name, skipping")
+                    continue
+
+                # Find all the nodes under Activities
+                if (activities := recipe.find("Activities")) is None:
+                    # Early skip if none exist
+                    logger.warning(f"Recipe {recipe_name} doesn't have any activities")
+                    continue
+
+                # Iterate through the activities
+                for activity in activities:
+                    if (
+                        activity_name := _parse_xml_text(activity, "Name", str)
+                    ) is None:
+                        # Early skip if activity has no name
+                        logger.warning(
+                            f"Activity in recipe {recipe_name} doesn't have a name, skipping"
+                        )
+                        continue
+
+                    # Create a unique name based on recipe and activity names
+                    unique_name = f"{recipe_name} - {activity_name}"
+                    step_info = MillingStepInfo(
+                        step_name=activity_name, recipe_name=recipe_name
+                    )
+
+                    # Iteratively update fields in the MillingStepInfo model if it's not None
+                    for field, path, func in ACTIVITY_FIELD_MAP:
+                        if (value := _parse_xml_text(activity, path, func)) is not None:
+                            step_info.__setattr__(field, value)
+
+                    # Add info for current step to the site info model
+                    site_info.steps.__setattr__(
+                        MILLING_STEP_NAMES[unique_name], step_info
+                    )
+
+            # Add info for current site to the dict
+            all_site_info[site_num] = site_info
+
+        logger.info(f"Successfully 
extracted AutoTEM metadata from file {file}") + return all_site_info + def _register_atlas(self, file: Path, environment: MurfeyInstanceEnvironment): """ Constructs the URL and dictionary to be posted to the server, which then triggers @@ -247,6 +531,7 @@ def _register_atlas(self, file: Path, environment: MurfeyInstanceEnvironment): token=self._token, instrument_name=environment.instrument_name, data={"file": str(file)}, + # Endpoint kwargs session_id=environment.murfey_session, ) logger.info(f"Registering atlas image {file.name!r}") diff --git a/src/murfey/server/api/workflow_fib.py b/src/murfey/server/api/workflow_fib.py index 5e21f497..44406757 100644 --- a/src/murfey/server/api/workflow_fib.py +++ b/src/murfey/server/api/workflow_fib.py @@ -1,3 +1,4 @@ +import json import logging from importlib.metadata import entry_points from pathlib import Path @@ -8,6 +9,7 @@ from murfey.server.api.auth import validate_instrument_token from murfey.server.murfey_db import murfey_db +from murfey.util.models import LamellaSiteInfo logger = logging.getLogger("murfey.server.api.workflow_fib") @@ -43,3 +45,15 @@ def register_fib_atlas( file=fib_atlas_info.file, murfey_db=db, ) + + +@router.post("/sessions/{session_id}/register_milling_progress") +def register_fib_milling_progress( + session_id: int, + site_info: LamellaSiteInfo, + db: Session = murfey_db, +): + logger.debug( + "Received the following FIB metadata for registration:\n" + f"{json.dumps(site_info.model_dump(exclude_none=True), indent=2, default=str)}" + ) diff --git a/src/murfey/util/models.py b/src/murfey/util/models.py index 7f92a743..86212f7f 100644 --- a/src/murfey/util/models.py +++ b/src/murfey/util/models.py @@ -5,11 +5,12 @@ from pathlib import Path from typing import Any, Dict, List, Optional -from pydantic import BaseModel, field_validator +from pydantic import BaseModel, computed_field, field_validator """ +======================================================================================= General 
Models -============== +======================================================================================= Models used in multiple workflows. """ @@ -91,8 +92,142 @@ class UpstreamFileRequestInfo(BaseModel): """ +======================================================================================= +FIB +======================================================================================= +Models related to the FIB workflow. +""" + + +class StagePositionValues(BaseModel): + # Coordinates are in metres + x: float | None = None + y: float | None = None + z: float | None = None + # Angles are in degrees + rotation: float | None = None + tilt_alpha: float | None = None + + @computed_field + def slot_number(self) -> int | None: + if self.x is None: + return None + return 1 if self.x < 0 else 2 + + +class StagePositionInfo(BaseModel): + """ + Stage position values associated with the different stages of the milling + process. The XML paths they're associated with (with "Site" as the parent + node) are indicated in the comments. + + The image acquisition steps have a "SiteLocationType" field that appear to + be associated with either "ChunkSiteLocation" or "ThinningSiteLocation". + "ThinningStagePosition" appears to be a duplicate of "ThinningSiteLocation" + so far, and it is unclearf for now what stages "PreparationSiteLocation" + and "ChunkCoincidenceStagePosition" currently correspond to. 
+ """ + + preparation: StagePositionValues | None = ( + None # PreparationSiteLocation/StagePosition/StagePosition + ) + chunk_coincidence: StagePositionValues | None = ( + None # Parameters/ChunkCoincidenceStagePosition/StagePosition + ) + chunk: StagePositionValues | None = ( + None # ChunkSiteLocation/StagePosition/StagePosition + ) + thinning_1: StagePositionValues | None = ( + None # Parameters/ThinningStagePosition/StagePosition + ) + thinning_2: StagePositionValues | None = ( + None # ThinningSiteLocation/StagePosition/StagePosition + ) + + +class MillingStepInfo(BaseModel): + """ + These are the parameters configured per milling step that we are interested + in tracking. Some attributes are present only for certain steps. + """ + + # Step setup + step_name: str | None = None + recipe_name: str | None = None + is_enabled: bool | None = None + status: str | None = None + execution_time: float | None = None + + # Associated stage position information + site_location_type: str | None = None + + # Beam info + beam_type: str | None = None + voltage: float | None = None + current: float | None = None + + # Milling info + milling_angle: float | None = None + depth_correction: float | None = None + lamella_offset: float | None = None + trench_height_front: float | None = None + trench_height_rear: float | None = None + width_overlap_front_left: float | None = None + width_overlap_front_right: float | None = None + width_overlap_rear_left: float | None = None + width_overlap_rear_right: float | None = None + + +class MillingSteps(BaseModel): + # Processing steps supported by AutoTEM + # Preparation stage + eucentric_tilt: MillingStepInfo | None = None + artificial_features: MillingStepInfo | None = None + milling_angle: MillingStepInfo | None = None + image_acquisition: MillingStepInfo | None = None + lamella_placement: MillingStepInfo | None = None + # Milling stage + delay_1: MillingStepInfo | None = None + reference_definition: MillingStepInfo | None = None + 
reference_definition_electron: MillingStepInfo | None = None + stress_relief_cuts: MillingStepInfo | None = None + reference_redefinition_1: MillingStepInfo | None = None + rough_milling: MillingStepInfo | None = None + rough_milling_electron: MillingStepInfo | None = None + reference_redefinition_2: MillingStepInfo | None = None + medium_milling: MillingStepInfo | None = None + medium_milling_electron: MillingStepInfo | None = None + fine_milling: MillingStepInfo | None = None + fine_milling_electron: MillingStepInfo | None = None + finer_milling: MillingStepInfo | None = None + finer_milling_electron: MillingStepInfo | None = None + # Thinning stage + delay_2: MillingStepInfo | None = None + polishing_1: MillingStepInfo | None = None + polishing_1_electron: MillingStepInfo | None = None + polishing_2: MillingStepInfo | None = None + polishing_2_ion: MillingStepInfo | None = None + polishing_2_electron: MillingStepInfo | None = None + + +class LamellaSiteInfo(BaseModel): + """ + Pydantic model that stores all the metadata of interest for a single lamella + site. + """ + + # Values not associated with a single step + project_name: str | None = None + site_name: str | None = None + site_number: int | None = None + stage_info: StagePositionInfo | None = None + steps: MillingSteps | None = None + + +""" +======================================================================================= Single Particle Analysis -======================== +======================================================================================= Models related to the single-particle analysis workflow. """ @@ -218,8 +353,9 @@ class Token(BaseModel): """ +======================================================================================= Tomography -========== +======================================================================================= Models related to the tomographic reconstruction workflow. 
""" diff --git a/src/murfey/util/route_manifest.yaml b/src/murfey/util/route_manifest.yaml index 787760f2..dcbc9ece 100644 --- a/src/murfey/util/route_manifest.yaml +++ b/src/murfey/util/route_manifest.yaml @@ -1440,3 +1440,10 @@ murfey.server.api.workflow_fib.router: type: int methods: - POST + - path: /workflow/fib/sessions/{session_id}/register_milling_progress + function: register_fib_milling_progress + path_params: + - name: session_id + type: int + methods: + - POST diff --git a/tests/client/contexts/test_fib.py b/tests/client/contexts/test_fib.py index b9344051..88538b6d 100644 --- a/tests/client/contexts/test_fib.py +++ b/tests/client/contexts/test_fib.py @@ -129,14 +129,32 @@ def test_file_transferred_to( ) == destination_dir / file.relative_to(visit_dir) +@pytest.mark.parametrize( + "test_params", + ( # File type to test | Use environment? | Find source? | Find destination? + ("drift_correction", True, True, True), + ("drift_correction", False, True, True), + ("drift_correction", True, False, True), + ("drift_correction", True, True, False), + ), +) def test_fib_autotem_context( mocker: MockerFixture, + test_params: tuple[str, bool, bool, bool], tmp_path: Path, visit_dir: Path, fib_autotem_dc_images: list[Path], ): + # Unpack test params + file_type, use_env, find_source, find_dst = test_params + # Mock the environment - mock_environment = MagicMock() + mock_environment = None + if use_env: + mock_environment = MagicMock() + + # Mock the logger to check if specific logs are triggered + mock_logger = mocker.patch("murfey.client.contexts.fib.logger") # Create a list of destinations destination_dir = tmp_path / "fib" / "data" / "current_year" / "visit" @@ -145,11 +163,9 @@ def test_fib_autotem_context( ] # Mock the functions used in 'post_transfer' - mock_get_source = mocker.patch( - "murfey.client.contexts.fib._get_source", return_value=tmp_path - ) + mock_get_source = mocker.patch("murfey.client.contexts.fib._get_source") mock_file_transferred_to = 
mocker.patch( - "murfey.client.contexts.fib._file_transferred_to", side_effect=destination_files + "murfey.client.contexts.fib._file_transferred_to" ) mock_capture_post = mocker.patch("murfey.client.contexts.fib.capture_post") @@ -162,19 +178,38 @@ def test_fib_autotem_context( token="", ) - # Parse images one-by-one and check that expected calls were made - for file in fib_autotem_dc_images: - context.post_transfer(file, environment=mock_environment) - mock_get_source.assert_called_with(file, mock_environment) - mock_file_transferred_to.assert_called_with( - environment=mock_environment, - source=basepath, - file_path=file, - rsync_basepath=Path(""), - ) - assert mock_capture_post.call_count == len(fib_autotem_dc_images) - assert len(context._milling) == num_lamellae - assert len(context._lamellae) == num_lamellae + match file_type: + case "drift_correction": + # Add case-specific return values and side-effects to the mocks + mock_get_source.return_value = tmp_path if find_source else None + if find_dst: + mock_file_transferred_to.side_effect = destination_files + else: + mock_file_transferred_to.return_value = None + + # Parse images one-by-one and check that expected calls were made + for file in fib_autotem_dc_images: + context.post_transfer(file, environment=mock_environment) + if not use_env: + mock_logger.warning.assert_called_with("No environment passed in") + elif not find_source: + mock_logger.warning.assert_called_with( + f"No source found for file {file}" + ) + elif not find_dst: + mock_logger.warning.assert_called_with( + f"File {file.name!r} not found on storage system" + ) + else: + mock_get_source.assert_called_with(file, mock_environment) + mock_file_transferred_to.assert_called_with( + environment=mock_environment, + source=basepath, + file_path=file, + rsync_basepath=Path(""), + ) + assert mock_capture_post.call_count == len(fib_autotem_dc_images) + assert len(context._drift_correction_images) == num_lamellae def test_fib_maps_context(