Commit 0b09a7d0 authored by Paul Schütze
Browse files

Merge branch 'tests_to_doc' into 'master'

Move Test Description Into Configuration Files

Closes #207

See merge request allpix-squared/allpix-squared!535
parents a01cd37a 2336f0d0
Loading
Loading
Loading
Loading
+56 −32
Original line number Diff line number Diff line
@@ -53,9 +53,42 @@ ENDIF()
# FIXME: this should be combined with the ADD_DEFINITIONS
CONFIGURE_FILE("${CMAKE_CURRENT_SOURCE_DIR}/cmake/config.cmake.h" "${CMAKE_CURRENT_BINARY_DIR}/config.h" @ONLY)

# Configure the installation prefix to allow both local as system-wide installation
# The FORCE overwrite is safe here: it only fires while the prefix is still at the
# CMake default, so a user-provided -DCMAKE_INSTALL_PREFIX is never stomped.
IF(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT)
    SET(CMAKE_INSTALL_PREFIX
        "${PROJECT_SOURCE_DIR}"
        CACHE PATH "Prefix prepended to install directories" FORCE)
ENDIF()
MESSAGE(STATUS "Installing to \"${CMAKE_INSTALL_PREFIX}\"")

# Set up packaging via CPack, using the project-specific configuration first:
INCLUDE(cmake/CPackConfig.cmake)
INCLUDE(CPack)

# Include allpix cmake functions
INCLUDE("cmake/AllpixMacros.cmake")

###################################
# Setup tests for allpix          #
###################################

# Toggles for the individual test categories; each category can be disabled separately:
OPTION(TEST_CORE "Perform unit tests to ensure framework core functionality?" ON)
OPTION(TEST_MODULES "Perform unit tests to ensure module functionality?" ON)
OPTION(TEST_PERFORMANCE "Perform unit tests to ensure framework performance?" ON)
OPTION(TEST_EXAMPLES "Perform unit tests to ensure example validity?" ON)

# Reset the internal cache entry collecting modules that ship tests, so the list
# is rebuilt from scratch on every CMake configure run:
SET(_MODULES_WITH_TESTS
    ""
    CACHE INTERNAL "MODULES_WITH_TESTS")

# Enable testing
ENABLE_TESTING()

# Include example configurations:
ADD_SUBDIRECTORY(examples)

# Include all tests
ADD_SUBDIRECTORY(etc/unittests)

###############################################
# Setup the environment for the documentation #
###############################################
@@ -67,6 +100,23 @@ ADD_SUBDIRECTORY(doc)

# If only building docs, stop processing the rest of the CMake file:
IF(BUILD_DOCS_ONLY)
    # Collect all module test configuration files; the bracket expression [00-99]
    # matches a single leading digit (glob character class), i.e. files whose
    # names start with a number. Paths are kept relative to src/modules so the
    # module name can be recovered from the path below.
    FILE(
        GLOB_RECURSE module_tests
        LIST_DIRECTORIES false
        RELATIVE "${CMAKE_CURRENT_SOURCE_DIR}/src/modules"
    ${CMAKE_CURRENT_SOURCE_DIR}/src/modules/*/*/[00-99]*.conf)
    FOREACH(test ${module_tests})
        # title: file name without extension; mod: name of the module directory
        # two levels above the test file (resolved via ABSOLUTE, then NAME):
        GET_FILENAME_COMPONENT(title ${test} NAME_WE)
        GET_FILENAME_COMPONENT(dir "${test}/../.." ABSOLUTE)
        GET_FILENAME_COMPONENT(mod "${dir}" NAME)
        ADD_ALLPIX_TEST(${CMAKE_CURRENT_SOURCE_DIR}/src/modules/${test} "modules/${mod}/${title}")
    ENDFOREACH()

    # ADD_ALLPIX_TEST accumulated the LaTeX items in TEST_DESCRIPTIONS (set in
    # this scope via PARENT_SCOPE); the core/performance lists come from global
    # properties. Generate the tests chapter and stop processing the file:
    SET(MODULES_TEST_DESCRIPTIONS "${TEST_DESCRIPTIONS}")
    GET_PROPERTY(CORE_TEST_DESCRIPTIONS GLOBAL PROPERTY CORE_TEST_DESCRIPTIONS)
    GET_PROPERTY(PERF_TEST_DESCRIPTIONS GLOBAL PROPERTY PERF_TEST_DESCRIPTIONS)
    CONFIGURE_FILE("${CMAKE_CURRENT_SOURCE_DIR}/doc/usermanual/chapters/tests.cmake.tex"
                   "${CMAKE_CURRENT_BINARY_DIR}/usermanual/usermanual/tests.tex" @ONLY)
    RETURN()
ENDIF()

@@ -80,13 +130,6 @@ ENABLE_LANGUAGE(CXX)
# Additional packages to be searched for by cmake
LIST(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake)

# Configure the installation prefix to allow both local as system-wide installation
IF(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT)
    SET(CMAKE_INSTALL_PREFIX
        "${PROJECT_SOURCE_DIR}"
        CACHE PATH "Prefix prepended to install directories" FORCE)
ENDIF()

# Set up the RPATH so executables find the libraries even when installed in non-default location
SET(CMAKE_MACOSX_RPATH TRUE)
SET(CMAKE_SKIP_BUILD_RPATH FALSE)
@@ -115,9 +158,6 @@ IF(NOT APPLE AND NOT CMAKE_LINK_WHAT_YOU_USE)
        CACHE STRING "Choose whether to only link libraries which contain symbols actually used by the target." FORCE)
ENDIF()

# Include allpix cmake functions
INCLUDE("cmake/AllpixMacros.cmake")

###################################
# Define build flags for allpix   #
###################################
@@ -305,28 +345,6 @@ LIST(REMOVE_ITEM CHECK_CMAKE_FILES "cmake/LATEX.cmake" "cmake/PANDOC.cmake" "cma

INCLUDE("cmake/cmake-checks.cmake")

###################################
# Setup tests for allpix          #
###################################

OPTION(TEST_CORE "Perform unit tests to ensure framework core functionality?" ON)
OPTION(TEST_MODULES "Perform unit tests to ensure module functionality?" ON)
OPTION(TEST_PERFORMANCE "Perform unit tests to ensure framework performance?" ON)
OPTION(TEST_EXAMPLES "Perform unit tests to ensure example validity?" ON)

SET(_MODULES_WITH_TESTS
    ""
    CACHE INTERNAL "MODULES_WITH_TESTS")

# Enable testing
ENABLE_TESTING()

# Include example configurations:
ADD_SUBDIRECTORY(examples)

# Include all tests
ADD_SUBDIRECTORY(etc/unittests)

###################################
# Define build targets for allpix #
###################################
@@ -404,6 +422,12 @@ ELSE()
    MESSAGE(STATUS "Unit tests: performance tests deactivated.")
ENDIF()

# Fetch the test description lists accumulated in global properties during test
# registration and substitute them into the LaTeX template for the tests chapter:
GET_PROPERTY(CORE_TEST_DESCRIPTIONS GLOBAL PROPERTY CORE_TEST_DESCRIPTIONS)
GET_PROPERTY(MODULES_TEST_DESCRIPTIONS GLOBAL PROPERTY MODULES_TEST_DESCRIPTIONS)
GET_PROPERTY(PERF_TEST_DESCRIPTIONS GLOBAL PROPERTY PERF_TEST_DESCRIPTIONS)
CONFIGURE_FILE("${CMAKE_CURRENT_SOURCE_DIR}/doc/usermanual/chapters/tests.cmake.tex"
               "${CMAKE_CURRENT_BINARY_DIR}/usermanual/usermanual/tests.tex" @ONLY)

############################
# Create local setup files #
############################
+22 −0
Original line number Diff line number Diff line
@@ -186,6 +186,21 @@ FUNCTION(add_allpix_test test name)
        SET(clioptions "${clioptions} ${opt}")
    ENDFOREACH()

    # Register the test for inclusion in the documentation:
    # Extract all lines of the test configuration file containing "#DESC " — the
    # test's human-readable description. Exactly one such line is expected.
    FILE(STRINGS ${test} DESC REGEX "#DESC ")
    LIST(LENGTH DESC listcount_desc)
    IF(listcount_desc EQUAL 0)
        MESSAGE(WARNING "Test ${name} does not provide a description")
    ELSEIF(listcount_desc GREATER 1)
        MESSAGE(FATAL_ERROR "More than one DESC expressions defined in test ${name}")
    ELSE()
        # Rewrite the marker into a LaTeX description item keyed by the test name:
        STRING(REPLACE "#DESC " "\\item[\\file{${name}}] " DESC "${DESC}")
        LIST(APPEND TEST_DESCRIPTIONS ${DESC})
    ENDIF()
    # Propagate the accumulated description list to the calling scope:
    SET(TEST_DESCRIPTIONS
        ${TEST_DESCRIPTIONS}
        PARENT_SCOPE)

    # Parse possible commands to be run before
    FILE(STRINGS ${test} OPTS REGEX "#BEFORE_SCRIPT ")
    FOREACH(opt ${OPTS})
@@ -278,6 +293,13 @@ MACRO(ALLPIX_MODULE_TESTS name directory)
                "${_allpix_module_dir};${_MODULES_WITH_TESTS}"
                CACHE INTERNAL "MODULES_WITH_TESTS")
        ENDIF()

        # Append list of test descriptions to global property
        # A global property is used (rather than a variable) so descriptions from
        # every module subdirectory accumulate into one list readable at top level:
        GET_PROPERTY(tmp GLOBAL PROPERTY MODULES_TEST_DESCRIPTIONS)
        FOREACH(item ${TEST_DESCRIPTIONS})
            SET(tmp "${tmp} ${item}")
        ENDFOREACH()
        SET_PROPERTY(GLOBAL PROPERTY MODULES_TEST_DESCRIPTIONS "${tmp}")
    ENDIF()
ENDMACRO()

+5 −0
Original line number Diff line number Diff line
@@ -154,4 +154,9 @@
\addreferencesline
\printbibliography

\appendix
\chapter{Appendix}

\input{tests.tex}

\end{document}
+0 −86
Original line number Diff line number Diff line
@@ -296,89 +296,3 @@ If a test is expected to create multiple error or warning messages which cannot
#PASS (ERROR) Multithreading disabled since the current module configuration does not support it
#FAIL FATAL
\end{minted}

\paragraph{Framework Functionality Tests}

The framework functionality tests aim at reproducing basic features such as correct parsing of configuration keys or resolution of module instantiations.
Currently implemented tests comprise:

\begin{description}
    \item[\file{test_01-1_globalconfig_detectors.conf}] tests the framework behavior in case of a non-existent detector setup description file.
    \item[\file{test_01-2_globalconfig_modelpaths.conf}] tests the correct parsing of additional model paths and the loading of the detector model.
    \item[\file{test_01-3_globalconfig_log_format.conf}] switches the logging format.
    \item[\file{test_01-4_globalconfig_log_level.conf}] sets a different logging verbosity level.
    \item[\file{test_01-5_globalconfig_log_file.conf}] configures the framework to write log messages into a file.
    \item[\file{test_01-6_globalconfig_missing_model.conf}] tests the behavior of the framework in case of a missing detector model file.
    \item[\file{test_01-7_globalconfig_random_seed.conf}] sets a defined random seed to start the simulation with.
    \item[\file{test_01-8_globalconfig_random_seed_core.conf}] sets a defined seed for the core component seed generator, e.g. used for misalignment.
    \item[\file{test_01-9_globalconfig_librarydirectory.conf}] tests the correct parsing and usage of additional library loading paths.
    \item[\file{test_02-1_specialization_unique_name.conf}] tests the framework behavior for an invalid module configuration: attempt to specialize a unique module for one detector instance.
    \item[\file{test_02-2_specialization_unique_type.conf}] tests the framework behavior for an invalid module configuration: attempt to specialize a unique module for one detector type.
    \item[\file{test_02-3_specialization_name.conf}] tests module instance specialization by name
    \item[\file{test_02-4_specialization_type.conf}] tests module instance specialization by type
    \item[\file{test_03-1_geometry_g4_coordinate_system.conf}] ensures that the \apsq and Geant4 coordinate systems and transformations are identical.
    \item[\file{test_03-2_geometry_rotations.conf}] tests the correct interpretation of rotation angles in the detector setup file.
    \item[\file{test_03-3_geometry_misaligned.conf}] tests the correct calculation of misalignments from alignment precisions given in the detector setup file.
    \item[\file{test_03-4_geometry_overwrite.conf}] checks that detector model parameters are overwritten correctly as described in Section~\ref{sec:detector_models}.
    \item[\file{test_03-5_geometry_invalid_implant.conf}] checks for correct detection of invalid implant size configurations
    \item[\file{test_03-6_geometry_overlap.conf}] checks for correct detection of volume overlaps in the geometry
    \item[\file{test_03-7_geometry_wrapper.conf}] checks for correct treatment of geometry wrappers and overlap calculations
    \item[\file{test_03-8_geometry_noposition.conf}] tests the framework behavior with a detector with no position provided in the geometry
    \item[\file{test_04-1_configuration_cli_change.conf}] tests whether single configuration values can be overwritten by options supplied via the command line.
    \item[\file{test_04-2_configuration_cli_nochange.conf}] tests whether command line options are correctly assigned to module instances and do not alter other values.
    \item[\file{test_04-3_configuration_imbalanced_brackets.conf}] tests whether imbalanced brackets in configuration values are properly detected.
    \item[\file{test_04-4_detector_config_cli_change.conf}] tests whether detector options can be overwritten from the command line.
    \item[\file{test_04-5_module_config_cli_detectors.conf}] tests whether framework parameters are properly parsed from the command line.
    \item[\file{test_04-6_module_config_double_unique.conf}] tests whether a double definition of a unique module is detected.
    \item[\file{test_04-7_module_config_empty_filter.conf}] tests the framework behavior with an empty filter.
    \item[\file{test_04-8_configuration_unused_key.conf}] tests the detection of unused configuration keys in the global configuration section.
    \item[\file{test_04-9_configuration_unused_key_module.conf}] tests the detection of unused configuration keys in a module configuration section.
    \item[\file{test_05-1_overwrite_same_denied.conf}] tests whether two modules writing to the same file is disallowed if overwriting is denied.
    \item[\file{test_05-overwrite_module_allowed.conf}] tests whether two modules writing to the same file is allowed if the last one re-enables overwriting locally.
    \item[\file{test_06-1_multithreading.conf}] checks if multithreading can be enabled.
    \item[\file{test_06-2_multithreading_cli.conf}] checks if multithreading can be enabled from the command line.
    \item[\file{test_06-3_multithreading_concurrency.conf}] tests if the number of workers can be configured.
    \item[\file{test_06-4_multithreading_zeroworkers.conf}] tests the framework response in case too few workers are enabled.
    \item[\file{test_06-5_multithreading_buffers.conf}] tests if the module buffer depth can be configured properly.
    \item[\file{test_06-6_multithreading_impossible.conf}] tests the framework response in case a module without multithreading capabilities has been enabled.
    \item[\file{test_06-7_multithreading_disabled.conf}] tests the framework response to explicitly disabling multithreading.
    \item[\file{test_06-8_multithreading_buffered.conf}] tests the reproducibility in case of a sequential module.
    \item[\file{test_06-9_multithreading_physics.conf}] tests the reproducibility in case of multithreading enabled.
    \item[\file{test_06-10_multithreading_physics_singlethr.conf}] tests the reproducibility in case of multithreading disabled.
    \item[\file{test_07-1_catch_exception.conf}] checks the correct propagation of exceptions with multithreading enabled.
    \item[\file{test_07-2_catch_exception_nomt.conf}] checks the correct propagation of exceptions with multithreading disabled.
\end{description}


\paragraph{Module Functionality Tests}

These tests ensure the proper functionality of each module covered and thus protect the framework against accidental changes affecting the physics simulation.
Using a fixed seed (using the \parameter{random_seed} configuration keyword) together with a specific version of Geant4~\cite{geant4} allows to reproduce the same simulation event.

One event is produced per test and the \parameter{DEBUG}-level logging output of the respective module is checked against pre-defined expectation output using regular expressions.
Once modules are altered, their respective expectation output has to be adapted after careful verification of the simulation result.

Module tests are located within the individual module source folders and are only enabled if the respective module will be built.


\paragraph{Performance Tests}

Similar to the module test implementation described above, performance tests use configurations prepared such that one particular module takes most of the load (dubbed the ``slowest instantiation'' by \apsq), and a few thousand events are simulated starting from a fixed seed for the pseudo-random number generator.
The \parameter{#TIMEOUT} keyword in the configuration file will ask CTest to abort the test after the given running time.

In the project CI, performance tests are limited to native runners, i.e. they are not executed on docker hosts where the hypervisor decides on the number of parallel jobs.
Only one test is performed at a time.

Despite these countermeasures, fluctuations on the CI runners occur, arising from different loads of the executing machines.
Thus, all performance CI jobs are marked with the \parameter{allow_failure} keyword which allows GitLab to continue processing the pipeline but will mark the final pipeline result as ``passed with warnings'' indicating an issue in the pipeline.
These tests should be checked manually before merging the code under review.

Current performance tests comprise:

\begin{description}
    \item[\file{test_01_deposition.conf}] tests the performance of charge carrier deposition in the sensitive sensor volume using Geant4~\cite{geant4}. A stepping length of \SI{1.0}{\um} is chosen, and \num{10000} events are simulated. The addition of an electric field and the subsequent projection of the charges are necessary since \apsq would otherwise detect that there are no recipients for the deposited charge carriers and skip the deposition entirely.
    \item[\file{test_02-1_propagation_generic.conf}] tests the very critical performance of the drift-diffusion propagation of charge carriers, as this is the most computing-intense module of the framework. Charge carriers are deposited and a propagation with 10 charge carriers per step and a fine spatial and temporal resolution is performed. The simulation comprises \num{500} events.
    \item[\file{test_02-2_propagation_project.conf}] tests the projection of charge carriers onto the implants, taking into account the diffusion only. Since this module is less computing-intense, a total of \num{5000} events are simulated, and charge carriers are propagated one-by-one.
    \item[\file{test_02-3_propagation_generic_multithread.conf}] tests the performance of multithreaded simulation. It utilizes the very same configuration as performance test 02-1 but in addition enables multithreading with four worker threads.
    \item[\file{test_03_multithreading.conf}] tests the performance of the framework when using multithreading with 4 workers to simulate \num{500} events. It uses a similar configuration as the example configuration.
\end{description}
+45 −0
Original line number Diff line number Diff line
\section{List of Tests}

\paragraph{Framework Functionality Tests}

The framework functionality tests aim at reproducing basic features such as correct parsing of configuration keys or resolution of module instantiations.
Currently implemented tests comprise:

\begin{description}
    @CORE_TEST_DESCRIPTIONS@
\end{description}


\paragraph{Module Functionality Tests}

These tests ensure the proper functionality of each module covered and thus protect the framework against accidental changes affecting the physics simulation.
Using a fixed seed (using the \parameter{random_seed} configuration keyword) together with a specific version of Geant4~\cite{geant4} allows to reproduce the same simulation event.

One event is produced per test and the \parameter{DEBUG}-level logging output of the respective module is checked against pre-defined expectation output using regular expressions.
Once modules are altered, their respective expectation output has to be adapted after careful verification of the simulation result.

Module tests are located within the individual module source folders and are only enabled if the respective module will be built.
The following tests are currently performed:

\begin{description}
    @MODULES_TEST_DESCRIPTIONS@
\end{description}


\paragraph{Performance Tests}

Similar to the module test implementation described above, performance tests use configurations prepared such that one particular module takes most of the load (dubbed the ``slowest instantiation'' by \apsq), and a few thousand events are simulated starting from a fixed seed for the pseudo-random number generator.
The \parameter{#TIMEOUT} keyword in the configuration file will ask CTest to abort the test after the given running time.

In the project CI, performance tests are limited to native runners, i.e. they are not executed on docker hosts where the hypervisor decides on the number of parallel jobs.
Only one test is performed at a time.

Despite these countermeasures, fluctuations on the CI runners occur, arising from different loads of the executing machines.
Thus, all performance CI jobs are marked with the \parameter{allow_failure} keyword which allows GitLab to continue processing the pipeline but will mark the final pipeline result as ``passed with warnings'' indicating an issue in the pipeline.
These tests should be checked manually before merging the code under review.

Current performance tests comprise:

\begin{description}
    @PERF_TEST_DESCRIPTIONS@
\end{description}
Loading