diff --git a/Framework/LiveData/inc/MantidLiveData/LoadLiveData.h b/Framework/LiveData/inc/MantidLiveData/LoadLiveData.h
index 95be2c5e1634a860172f0efb8908b91f5bbacb0b..cbf7a56c0b39a6b27e1fd36f8ab5b3a291e99383 100644
--- a/Framework/LiveData/inc/MantidLiveData/LoadLiveData.h
+++ b/Framework/LiveData/inc/MantidLiveData/LoadLiveData.h
@@ -66,7 +66,7 @@ private:
   void appendChunk(Mantid::API::Workspace_sptr chunkWS);
   API::Workspace_sptr appendMatrixWSChunk(API::Workspace_sptr accumWS,
                                           Mantid::API::Workspace_sptr chunkWS);
-  void resetAllXToSingleBin(API::Workspace *workspace);
+  void updateDefaultBinBoundaries(API::Workspace *workspace);

   /// The "accumulation" workspace = after adding, but before post-processing
   Mantid::API::Workspace_sptr m_accumWS;
diff --git a/Framework/LiveData/src/LiveDataAlgorithm.cpp b/Framework/LiveData/src/LiveDataAlgorithm.cpp
index 61ef87e70a14db04745fba7678bd20e78be82e86..4ac1e436c9c252bcf7fcdfd6d636950fd4fd8afe 100644
--- a/Framework/LiveData/src/LiveDataAlgorithm.cpp
+++ b/Framework/LiveData/src/LiveDataAlgorithm.cpp
@@ -74,10 +74,7 @@ void LiveDataAlgorithm::initProps() {
       make_unique<PropertyWithValue<std::string>>("ProcessingAlgorithm", "",
                                                   Direction::Input),
       "Name of the algorithm that will be run to process each chunk of data.\n"
-      "Optional. If blank, no processing will occur. Note that, if "
-      "PreserveEvents is enabled, any rebinning done in this step will be "
-      "lost. Use the Post-Process step for custom rebinning of "
-      "EventWorkspaces.");
+      "Optional. If blank, no processing will occur.");
   declareProperty(
       make_unique<PropertyWithValue<std::string>>("ProcessingProperties", "",
                                                   Direction::Input),
@@ -89,19 +86,13 @@ void LiveDataAlgorithm::initProps() {
                       "ProcessingScript", "", Direction::Input),
                   "A Python script that will be run to process each chunk of "
                   "data. Only for command line usage, does not appear on the "
-                  "user interface. Note that, if PreserveEvents is enabled, "
-                  "any rebinning done in this step will be lost. Use the "
-                  "Post-Process step for custom rebinning of "
-                  "EventWorkspaces.");
+                  "user interface.");

   declareProperty(make_unique<FileProperty>("ProcessingScriptFilename", "",
                                             FileProperty::OptionalLoad, "py"),
                   "A Python script that will be run to process each chunk of "
                   "data. Only for command line usage, does not appear on the "
-                  "user interface. Note that, if PreserveEvents is enabled, "
-                  "any rebinning done in this step will be lost. Use the "
-                  "Post-Process step for custom rebinning of "
-                  "EventWorkspaces.");
+                  "user interface.");

   std::vector<std::string> propOptions{"Add", "Replace", "Append"};
   declareProperty(
diff --git a/Framework/LiveData/src/LoadLiveData.cpp b/Framework/LiveData/src/LoadLiveData.cpp
index d52545245f2a76b458cf194c0a569f2766f2f770..ba5fd01ffbec0515c3607933438386e504dc5987 100644
--- a/Framework/LiveData/src/LoadLiveData.cpp
+++ b/Framework/LiveData/src/LoadLiveData.cpp
@@ -417,6 +417,17 @@ Workspace_sptr LoadLiveData::appendMatrixWSChunk(Workspace_sptr accumWS,
   return accumWS;
 }

+namespace {
+bool isUsingDefaultBinBoundaries(const EventWorkspace *workspace) {
+  // only check first spectrum
+  const auto &x = workspace->binEdges(0);
+  if (x.size() > 2)
+    return false;
+  // make sure that they are sorted
+  return (x.front() < x.back());
+}
+} // namespace
+
 //----------------------------------------------------------------------------------------------
 /** Resets all HistogramX in given EventWorkspace(s) to a single bin.
  *
@@ -428,15 +439,17 @@ Workspace_sptr LoadLiveData::appendMatrixWSChunk(Workspace_sptr accumWS,
  *
  * @param workspace :: Workspace(Group) that will have its bins reset
  */
-void LoadLiveData::resetAllXToSingleBin(API::Workspace *workspace) {
+void LoadLiveData::updateDefaultBinBoundaries(API::Workspace *workspace) {
   if (auto *ws_event = dynamic_cast<EventWorkspace *>(workspace)) {
-    ws_event->resetAllXToSingleBin();
+    if (isUsingDefaultBinBoundaries(ws_event))
+      ws_event->resetAllXToSingleBin();
   } else if (auto *ws_group = dynamic_cast<WorkspaceGroup *>(workspace)) {
     auto num_entries = static_cast<size_t>(ws_group->getNumberOfEntries());
     for (size_t i = 0; i < num_entries; ++i) {
       auto ws = ws_group->getItem(i);
       if (auto *ws_event = dynamic_cast<EventWorkspace *>(ws.get()))
-        ws_event->resetAllXToSingleBin();
+        if (isUsingDefaultBinBoundaries(ws_event))
+          ws_event->resetAllXToSingleBin();
     }
   }
 }
@@ -492,18 +505,17 @@ void LoadLiveData::exec() {
   this->setPropertyValue("LastTimeStamp", lastTimeStamp.toISO8601String());

   // For EventWorkspaces, we adjust the X values such that all events fit
-  // within the bin boundaries. This is done both before and after the
-  // "Process" step. Any custom rebinning should be done in Post-Processing.
-  bool PreserveEvents = this->getProperty("PreserveEvents");
-  if (PreserveEvents)
-    this->resetAllXToSingleBin(chunkWS.get());
+  // within the bin boundaries
+  const bool preserveEvents = this->getProperty("PreserveEvents");
+  if (preserveEvents)
+    this->updateDefaultBinBoundaries(chunkWS.get());

   // Now we process the chunk
   Workspace_sptr processed = this->processChunk(chunkWS);

   EventWorkspace_sptr processedEvent =
       boost::dynamic_pointer_cast<EventWorkspace>(processed);
-  if (!PreserveEvents && processedEvent) {
+  if (!preserveEvents && processedEvent) {
     // Convert the monitor workspace, if there is one and it's necessary
     MatrixWorkspace_sptr monitorWS = processedEvent->monitorWorkspace();
     auto monitorEventWS =
@@ -548,18 +560,20 @@ void LoadLiveData::exec() {
   g_log.notice() << "Performing the " << accum << " operation.\n";

   // Perform the accumulation and set the AccumulationWorkspace workspace
-  if (accum == "Replace")
+  if (accum == "Replace") {
     this->replaceChunk(processed);
-  else if (accum == "Append")
+  } else if (accum == "Append") {
     this->appendChunk(processed);
-  else
+  } else {
     // Default to Add.
     this->addChunk(processed);

-  // For EventWorkspaces, we adjust the X values such that all events fit
-  // within the bin boundaries. This is done both before and after the
-  // "Process" step. Any custom rebinning should be done in Post-Processing.
-  this->resetAllXToSingleBin(m_accumWS.get());
+    // When adding events, the default bin boundaries may need to be updated.
+    // The function itself checks to see if it is appropriate
+    if (preserveEvents) {
+      this->updateDefaultBinBoundaries(m_accumWS.get());
+    }
+  }

   // At this point, m_accumWS is set.
diff --git a/Framework/LiveData/test/LoadLiveDataTest.h b/Framework/LiveData/test/LoadLiveDataTest.h
index 7d3734e109d30a9aa3d0a9c3d533ec2a62395c04..1eb6147cd4b60efadd47adc4768bdb0913f376a4 100644
--- a/Framework/LiveData/test/LoadLiveDataTest.h
+++ b/Framework/LiveData/test/LoadLiveDataTest.h
@@ -289,7 +289,7 @@ public:
     // Accumulated workspace: it was rebinned, but rebinning should be reset
     TS_ASSERT_EQUALS(ws_accum->getNumberHistograms(), 2);
     TS_ASSERT_EQUALS(ws_accum->getNumberEvents(), 200);
-    TS_ASSERT_EQUALS(ws_accum->blocksize(), 1);
+    TS_ASSERT_EQUALS(ws_accum->blocksize(), 40);

     // The post-processed workspace was rebinned starting at 40e3
     TS_ASSERT_EQUALS(ws->getNumberHistograms(), 2);
diff --git a/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/DirectILLDiagnostics.py b/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/DirectILLDiagnostics.py
index 7e771323b5f73efb4fe7da2969d73de8e33a284c..4b136994689002a10f6a5c0714ab01b7377e796f 100644
--- a/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/DirectILLDiagnostics.py
+++ b/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/DirectILLDiagnostics.py
@@ -9,8 +9,8 @@ from mantid.api import (AlgorithmFactory, DataProcessorAlgorithm, InstrumentVali
                         WorkspaceProperty, WorkspaceUnitValidator)
 from mantid.kernel import (CompositeValidator, Direction, FloatBoundedValidator, IntArrayBoundedValidator,
                            IntArrayProperty, Property, StringArrayProperty, StringListValidator)
-from mantid.simpleapi import (ClearMaskFlag, CloneWorkspace, CreateEmptyTableWorkspace, Divide,
-                              ExtractMask, Integration, LoadMask, MaskDetectors, MedianDetectorTest, Plus, SolidAngle)
+from mantid.simpleapi import (ClearMaskFlag, CloneWorkspace, CreateEmptyTableWorkspace, CreateSingleValuedWorkspace, Divide,
+                              ExtractMask, Integration, LoadMask, MaskDetectors, MedianDetectorTest, Multiply, Plus, SolidAngle)
 import numpy
 import os.path

@@ -83,10 +83,17 @@ def _createDiagnosticsReportTable(reportWSName, numberHistograms, algorithmLoggi


 def _createMaskWS(ws, name, algorithmLogging):
     """Return a single bin workspace with same number of histograms as ws."""
-    maskWS, detList = ExtractMask(InputWorkspace=ws,
-                                  OutputWorkspace=name,
-                                  EnableLogging=algorithmLogging)
-    maskWS *= 0.0
+    extractResult = ExtractMask(InputWorkspace=ws,
+                                OutputWorkspace=name,
+                                EnableLogging=algorithmLogging)
+    zeroWS = CreateSingleValuedWorkspace(DataValue=0.,
+                                         ErrorValue=0.,
+                                         EnableLogging=algorithmLogging,
+                                         StoreInADS=False)
+    maskWS = Multiply(LHSWorkspace=extractResult.OutputWorkspace,
+                      RHSWorkspace=zeroWS,
+                      OutputWorkspace=name,
+                      EnableLogging=algorithmLogging)
     return maskWS

@@ -793,10 +800,10 @@ class DirectILLDiagnostics(DataProcessorAlgorithm):
                       DetectorList=userMask,
                       ComponentList=maskComponents,
                       EnableLogging=algorithmLogging)
-        maskWS, detectorList = ExtractMask(InputWorkspace=maskWS,
-                                           OutputWorkspace=maskWSName,
-                                           EnableLogging=algorithmLogging)
-        return maskWS
+        extractResult = ExtractMask(InputWorkspace=maskWS,
+                                    OutputWorkspace=maskWSName,
+                                    EnableLogging=algorithmLogging)
+        return extractResult.OutputWorkspace

     def _value(self, ws, propertyName, instrumentParameterName, defaultValue):
         """Return a suitable value either from a property, the IPF or the supplied defaultValue."""
diff --git a/Framework/PythonInterface/test/python/plugins/algorithms/WorkflowAlgorithms/DirectILLDiagnosticsTest.py b/Framework/PythonInterface/test/python/plugins/algorithms/WorkflowAlgorithms/DirectILLDiagnosticsTest.py
index f96d3820519481349700b23bfebfbf3133cc77ad..35aa19c91d323eadb57e5aa0df2369ff4b0d9990 100644
--- a/Framework/PythonInterface/test/python/plugins/algorithms/WorkflowAlgorithms/DirectILLDiagnosticsTest.py
+++ b/Framework/PythonInterface/test/python/plugins/algorithms/WorkflowAlgorithms/DirectILLDiagnosticsTest.py
@@ -181,6 +181,34 @@ class DirectILLDiagnosticsTest(unittest.TestCase):
             else:
                 self.assertEquals(ys[0], 0)

+    def testMaskedComponents(self):
+        inWS = mtd[self._RAW_WS_NAME]
+        spectraCount = inWS.getNumberHistograms()
+        outWSName = 'diagnosticsWS'
+        kwargs = {
+            'InputWorkspace': self._RAW_WS_NAME,
+            'OutputWorkspace': outWSName,
+            'ElasticPeakDiagnostics': 'Peak Diagnostics OFF',
+            'BkgDiagnostics': 'Bkg Diagnostics OFF',
+            'BeamStopDiagnostics': 'Beam Stop Diagnostics OFF',
+            'DefaultMask': 'Default Mask OFF',
+            'MaskedComponents': 'tube_1',
+            'rethrow': True
+        }
+        run_algorithm('DirectILLDiagnostics', **kwargs)
+        self.assertTrue(mtd.doesExist(outWSName))
+        outWS = mtd[outWSName]
+        self.assertEquals(outWS.getNumberHistograms(), spectraCount)
+        self.assertEquals(outWS.blocksize(), 1)
+        for i in range(spectraCount):
+            Ys = outWS.readY(i)
+            detector = outWS.getDetector(i)
+            componentName = detector.getFullName()
+            if 'tube_1' in componentName:
+                self.assertEquals(Ys[0], 1)
+            else:
+                self.assertEquals(Ys[0], 0)
+
     def testOutputIsUsable(self):
         inWS = mtd[self._RAW_WS_NAME]
         spectraCount = inWS.getNumberHistograms()
diff --git a/MantidPlot/CMakeLists.txt b/MantidPlot/CMakeLists.txt
index b6d215007ee8a6e5888fa2e0d8a6c8829a3c2b17..9006e50ef3175fbdb82269c12129baf4258ef749 100644
--- a/MantidPlot/CMakeLists.txt
+++ b/MantidPlot/CMakeLists.txt
@@ -891,6 +891,7 @@ set ( MANTIDPLOT_TEST_PY_FILES
   MantidPlotMdiSubWindowTest.py
   MantidPlotTiledWindowTest.py
   MantidPlotInputArgsCheck.py
+  MantidPlotProjectRecovery.py
   TSVSerialiserTest.py
 )

diff --git a/MantidPlot/test/MantidPlotProjectRecovery.py b/MantidPlot/test/MantidPlotProjectRecovery.py
new file mode 100644
index 0000000000000000000000000000000000000000..755aef40761fd1b2a2656b399b26ccafb3cc43e7
--- /dev/null
+++ b/MantidPlot/test/MantidPlotProjectRecovery.py
@@ -0,0 +1,55 @@
+import mantidplottests
+import os
+import platform
+import unittest
+import mantidplot
+from mantidplottests import runTests, threadsafe_call
+from glob import glob
+#from mantid.kernel import *
+from mantid import AnalysisDataService, ConfigService
+from mantid.simpleapi import CreateWorkspace
+from PyQt4 import QtGui
+import shutil
+import _qti
+
+path = os.path.join(ConfigService.getAppDataDirectory(), 'recovery', platform.node())
+
+def cleanUp():
+    # Clean Up
+    for cc in glob(os.path.join(path, "*", "")):
+        shutil.rmtree(cc)
+    AnalysisDataService.clear()
+
+class MantidPlotProjectRecovery(unittest.TestCase):
+
+
+
+    def test_exec(self):
+        CreateWorkspace(OutputWorkspace="ws", DataX=[
+                        1, 2, 3], DataY=[1, 2, 3], NSpec=1)
+        threadsafe_call(_qti.app.saveRecoveryCheckpoint)
+
+        cleanUp()
+        #Test that it cleaned up after itself
+        self.assertEqual(len(glob(os.path.join(path, "*", ""))), 0)
+
+    def test_checkpoint_creation(self):
+        CreateWorkspace(OutputWorkspace="ws", DataX=[
+                        1, 2, 3], DataY=[1, 2, 3], NSpec=1)
+
+        listOfCheckpointsBefore = glob(os.path.join(path, "*", ""))
+        for ii in listOfCheckpointsBefore:
+            shutil.rmtree(ii)
+
+        threadsafe_call(_qti.app.saveRecoveryCheckpoint)
+        listOfCheckpointsAfter = glob(os.path.join(path, "*", ""))
+
+        self.assertEqual(len(listOfCheckpointsAfter), 1)
+
+        cleanUp()
+        #Test that it cleaned up after itself
+        self.assertEqual(len(glob(os.path.join(path, "*", ""))), 0)
+
+
+# Run the unit tests
+mantidplottests.runTests(MantidPlotProjectRecovery)
diff --git a/buildconfig/CMake/GNUSetup.cmake b/buildconfig/CMake/GNUSetup.cmake
index ebb044b0372261e9537b76cdead083c0499e5218..71e57587cac0d39bb42ee49eb6e3ab296671aea6 100644
--- a/buildconfig/CMake/GNUSetup.cmake
+++ b/buildconfig/CMake/GNUSetup.cmake
@@ -22,7 +22,7 @@ if ( CMAKE_COMPILER_IS_GNUCXX )
   endif()
   set( GCC_COMPILER_VERSION ${CMAKE_CXX_COMPILER_VERSION} CACHE INTERNAL "")
   message( STATUS "gcc version: ${GCC_COMPILER_VERSION}" )
-  if ( GCC_COMPILER_VERSION VERSION_LESS "5.1.0" )
+  if ( NOT (GCC_COMPILER_VERSION VERSION_LESS "5.1.0") )
     # Add an option to use the old C++ ABI if gcc is 5 series
     option ( USE_CXX98_ABI "If enabled, sets the _GLIBCXX_USE_CXX11_ABI=0 compiler flag" OFF)
     if ( USE_CXX98_ABI )
diff --git a/docs/source/algorithms/LoadLiveData-v1.rst b/docs/source/algorithms/LoadLiveData-v1.rst
index f94295576e348cbcecc3563f3c4826891176c067..31f7af8a594d58ab50ed497c99c46b1ce2cff2f8 100644
--- a/docs/source/algorithms/LoadLiveData-v1.rst
+++ b/docs/source/algorithms/LoadLiveData-v1.rst
@@ -31,26 +31,20 @@ Data Processing
 - You have two options on how to process this workspace:

 Processing with an Algorithm
-############################
+++++++++++++++++++++++++++++

 - Specify the name of the algorithm in the ``ProcessingAlgorithm``
   property.

-  - This could be, e.g. a `Python Algorithm <Python Algorithm>`__
-    written for this purpose.
+  - This could be a Python algorithm written for this purpose.
   - The algorithm *must* have at least 2 properties: ``InputWorkspace``
     and ``OutputWorkspace``.
   - Any other properties are set from the string in ``ProcessingProperties``.
-  - The algorithm is then run, and its ``OutputWorkspace`` is saved.
-
-.. note:
-
-   When PreserveEvents is enabled, any rebinning done in this step will be
-   lost. Use the Post-Process step for custom rebinning of
-   EventWorkspaces.
+  - The algorithm is then run, and its ``OutputWorkspace`` is saved.
+

 Processing with a Python Script
-###############################
++++++++++++++++++++++++++++++++

 The python script is run using :ref:`algm-RunPythonScript`. Please see
 its documentation for details of how the script is run.
@@ -72,10 +66,13 @@ its documentation for details of how the script is run.
   - Contents of the file have the exact same rules as specifying the
     ``ProcessingScript``

-.. note:
+.. note::

-   When PreserveEvents is enabled, any rebinning done in this step will be
-   lost. Use the Post-Process step instead for EventWorkspaces.
+   When ``PreserveEvents`` is enabled and the data has not been binned
+   during the process step (with ``ProcessingAlgorithm``,
+   ``ProcessingScript``, or ``ProcessingScriptFilename``), the data
+   will be rebinned at the end of the step to include all events. Use
+   the Post-Process step for any custom rebinning of :ref:`EventWorkspaces <EventWorkspace>`.

 Data Accumulation
 #################
@@ -90,36 +87,31 @@ Data Accumulation
 - If you select ``Append``, then the spectra from each chunk will be
   appended to the output workspace.

-A Warning About Events
-######################
+.. warning::

-Beware! If you select ``PreserveEvents=True`` and your processing
-keeps the data as :ref:`EventWorkspaces <EventWorkspace>`, you may end
-up creating **very large** EventWorkspaces in long runs. Most plots
-require re-sorting the events, which is an operation that gets much
-slower as the list gets bigger (Order of :math:`N * log(N)`). This
-could cause Mantid to run very slowly or to crash due to lack of
-memory.
+   Beware! If you select ``PreserveEvents=True`` and your processing
+   keeps the data as :ref:`EventWorkspaces <EventWorkspace>`, you may end
+   up creating **very large** EventWorkspaces in long runs. Most plots
+   require re-sorting the events, which is an operation that gets much
+   slower as the list gets bigger (Order of :math:`N * log(N)`). This
+   could cause Mantid to run very slowly or to crash due to lack of
+   memory.

-Additionally, the resulting EventWorkspaces produced when
-``PreserveEvents=True`` will have their X values reset to a single bin with
-boundaries that encompass all events currently in the workspace. This means
-that any rebinning that was done during the Process step will be lost. If
-custom binning is required, this should be done using the Post-Process step
-described below.
+   It is highly recommended to run :ref:`CompressEvents <algm-CompressEvents>`
+   early in the Post-Processing step if the data is going to remain
+   as events.

 Post-Processing Step
 ####################

-- Optionally, you can specify some processing to perform *after*
-  accumulation.
+- Optionally, you can specify some processing to perform *after*
+  accumulation.

-  - You then need to specify the ``AccumulationWorkspace`` property.
+  - You then need to specify the ``AccumulationWorkspace`` property.

-- Using either the ``PostProcessingAlgorithm``,
-  ``PostProcessingScript``, or ``PostProcessingScriptFilename`` (same
-  way as above), the ``AccumulationWorkspace`` is processed into the
-  ``OutputWorkspace``
+- Using either the ``PostProcessingAlgorithm``, ``PostProcessingScript``,
+  or ``PostProcessingScriptFilename`` (same way as above), the
+  ``AccumulationWorkspace`` is processed into the ``OutputWorkspace``

 Usage
 -----
diff --git a/docs/source/release/v3.14.0/framework.rst b/docs/source/release/v3.14.0/framework.rst
index bd6226d7a6eb00274b051e2e79d53f23a5679343..6333b037be40330909381e2e39c6a2e4a3e8f75e 100644
--- a/docs/source/release/v3.14.0/framework.rst
+++ b/docs/source/release/v3.14.0/framework.rst
@@ -48,7 +48,7 @@ Improvements
 - :ref:`CropToComponent <algm-CropToComponent>` now supports also scanning workspaces.
 - :ref:`SumOverlappingTubes <algm-SumOverlappingTubes>` will produce histogram data, and will not split the counts between bins by default.
 - :ref:`SumSpectra <algm-SumSpectra>` has an additional option, ``MultiplyBySpectra``, which controls whether or not the output spectra are multiplied by the number of bins. This property should be set to ``False`` for summing spectra as PDFgetN does.
-- :ref:`Live Data <algm-StartLiveData>` for events in PreserveEvents mode now produces workspaces that have bin boundaries which encompass the total x-range (TOF) for all events across all spectra.
+- :ref:`Live Data <algm-StartLiveData>` for events with ``PreserveEvents=True`` now produces workspaces that have bin boundaries which encompass the total x-range (TOF) for all events across all spectra if the data was not binned during the process step.
 - Bugfix in :ref:`ConvertToMatrixWorkspace <algm-ConvertToMatrixWorkspace>` with ``Workspace2D`` as the ``InputWorkspace`` not being cloned to the ``OutputWorkspace``. Added support for ragged workspaces.
 - :ref:`RebinToWorkspace <algm-RebinToWorkspace>` now checks if the ``WorkspaceToRebin`` and ``WorkspaceToMatch`` already have the same binning. Added support for ragged workspaces.
 - :ref:`GroupWorkspaces <algm-GroupWorkspaces>` supports glob patterns for matching workspaces in the ADS.
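
For reference, the decision logic gated by the new isUsingDefaultBinBoundaries helper in LoadLiveData.cpp can be modelled outside Mantid. The sketch below is a minimal, standalone approximation that assumes only the C++ standard library: a plain std::vector of bin edges stands in for EventWorkspace::binEdges(0), so it illustrates the check itself rather than the real workspace API.

    #include <iostream>
    #include <vector>

    // Standalone model of the check added to LoadLiveData.cpp: a spectrum is
    // treated as still having the default binning when it holds a single bin
    // (two edges) whose boundaries are in increasing order.
    bool isUsingDefaultBinBoundaries(const std::vector<double> &edges) {
      // more than two edges means the chunk was rebinned during processing
      if (edges.size() > 2)
        return false;
      // a single bin counts as "default" only if its boundaries are sorted
      return edges.front() < edges.back();
    }

    int main() {
      const std::vector<double> defaultBin{0.0, 100.0};            // untouched chunk
      const std::vector<double> customBins{0.0, 25.0, 50.0, 75.0}; // rebinned chunk

      // true  -> the X values would be reset to span all events
      // false -> the binning applied during the Process step is preserved
      std::cout << std::boolalpha << isUsingDefaultBinBoundaries(defaultBin) << '\n'
                << isUsingDefaultBinBoundaries(customBins) << '\n';
      return 0;
    }

With this check in place, updateDefaultBinBoundaries only calls resetAllXToSingleBin on workspaces that still carry the default single bin, which is why the LoadLiveDataTest expectation for the accumulated workspace changes from blocksize() == 1 to blocksize() == 40 when the chunk was rebinned during processing.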