Unverified Commit 5122af87 authored by Jose Borreguero's avatar Jose Borreguero Committed by GitHub
Browse files

integration test for a valid dataset (#252)

* integration test for valid dataset
* use np.testing.assert_allclose
* correct over-assertion
* mark as datarepo
* force conversion to np.array
* explicit extension for glob pattern of TIFF files
* Follow save_dir signature change
* Use different function to load tiffs
* fix test_run fetching of output
* change omegas to rot_angles
* Fix finding tiffs
* Add missing arguments
* add tmpdir fixture
* endow input data dirs with absolute path
* add gold data
* refactor saving the log and config
* use same conda incubator in unit and integration testing
* force download of miniconda
* avoid having two tests writing to the same output directory
* detach the file handler before moving the log file
* sort the saved TIFF files upon loading
* migrate to fixture tmpdir
* unskip two tests
parent f7ccb8cc
Loading
Loading
Loading
Loading
+3 −2
Original line number Diff line number Diff line
@@ -28,9 +28,10 @@ jobs:
          lfs: true
      - uses: conda-incubator/setup-miniconda@v2
        with:
          miniforge-variant: Mambaforge
          miniconda-version: "latest"
          auto-update-conda: true
          mamba-version: "*"
          environment-file: environment.yml
          use-mamba: true
      - name: Tests with data repository
        run: python -m pytest -m datarepo
      - name: Update job status
+22 −21
Original line number Diff line number Diff line
@@ -72,19 +72,20 @@ def main(inputfile: Union[str, Path], outputdir: Union[str, Path]) -> int:
    inputfile = Path(inputfile)
    outputdir = Path(outputdir)

    time_str = to_time_str()  # date stamp for log and configuration files

    # create log file to capture the root logger, in order to also capture messages from the backend
    log_fn = outputdir / f"reduce_CG1D_{time_str}.log"
    log_fh = logging.FileHandler(log_fn)
    log_fh.setLevel(logging.INFO)
    logging.getLogger().addHandler(log_fh)

    # verify the inputs are sensible
    input_checking = _validate_inputs(inputfile, outputdir)
    if input_checking > 0:
        return input_checking

    time_str = to_time_str()  # date stamp for log and configuration files

    # create log file to capture the root logger, in order to also capture messages from the backend
    log_file_path = outputdir / f"reduce_CG1D_{time_str}.log"
    log_file_handler = logging.FileHandler(log_file_path)
    log_file_handler.setLevel(logging.INFO)
    root_logger = logging.getLogger()
    root_logger.addHandler(log_file_handler)

    # check if data is ready for reduction
    if not auto_reduction_ready(inputfile):
        logger.warning("Data incomplete, waiting for next try.")
@@ -111,13 +112,6 @@ def main(inputfile: Union[str, Path], outputdir: Union[str, Path]) -> int:
        logger.exception("Unable to update the template configuration")
        return ERROR_GENERAL

    # save config file to working directory
    # NOTE:
    #  i.e. ironman_20221108_154015.json
    exp_name = config_dict["name"].replace(" ", "_")
    config_fn = outputdir / f"{exp_name}_{time_str}.json"
    save_config(config_dict, config_fn)

    # call the auto reduction with updated dict
    try:
        workflow = WorkflowEngineAuto(config_dict)
@@ -127,12 +121,19 @@ def main(inputfile: Union[str, Path], outputdir: Union[str, Path]) -> int:
        logger.exception("Failed to create and run workflow")
        exit_code = e.exit_code

    # move files to image directory if auto-reduction is successful
    logging.shutdown()  # flushing and closing all handlers
    target_dir = workflow.registry["save_dir"]
    if exit_code == WORKFLOW_SUCCESS:
        shutil.move(config_fn, target_dir)
        shutil.move(log_fn, target_dir)
    # save configuration and log files to appropriate directory
    config_file_name = config_dict["name"].replace(" ", "_")
    radiographs_dir = workflow.registry.get("save_dir")
    if exit_code == WORKFLOW_SUCCESS and radiographs_dir:
        config_file_path = radiographs_dir / f"{config_file_name}_{time_str}.json"
        save_config(config_dict, config_file_path)
        log_file_handler.flush()
        root_logger.removeHandler(log_file_handler)
        log_file_handler.close()
        shutil.move(log_file_path, radiographs_dir)  # move the log file to the radiographs directory
    else:
        config_file_path = outputdir / f"{config_file_name}_{time_str}.json"
        save_config(config_dict, config_file_path)

    return exit_code

+2 −2
Original line number Diff line number Diff line
@@ -107,8 +107,8 @@
            "function": "imars3d.backend.dataio.data.save_data",
            "inputs": {
                "data": "result",
                "filename": "test",
                "outputdir" : "outputdir"
                "name": "test",
                "outputbase" : "outputdir"
            },
            "outputs": ["save_dir"]
        }
+1 −1
Original line number Diff line number Diff line
@@ -263,7 +263,7 @@ class denoise(param.ParameterizedFunction):
        params = param.ParamOverrides(self, params)

        # type validation is done, now replacing max_worker with an actual integer
        self.max_workers = clamp_max_workers(self.max_workers)
        self.max_workers = clamp_max_workers(params.max_workers)
        logger.debug(f"max_worker={self.max_workers}")
        denoised_array = None
        if params.method == "median":
+5 −0
Original line number Diff line number Diff line
#!/usr/bin/env python3
"""Configuration file handler for the imars3d."""
import json
import logging
from pathlib import Path
from typing import Union

# setup module level logger
logger = logging.getLogger(__name__)


def save_config(
    config_dict: dict,
@@ -30,3 +34,4 @@ def save_config(
    # now write to disk
    with open(filepath, "w") as outfile:
        json.dump(config_dict, outfile, indent=2, sort_keys=False)
    logger.info(f"Configuration saved to {str(filepath)}")
Loading