diff --git a/CMakeLists.txt b/CMakeLists.txt
index e4d2d6906b29352526f8ef24ba85c80810fd9750..7d98edd9725ba99d3c080896f475a6ab272d3e55 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -292,8 +292,6 @@ if ( ENABLE_CPACK )
       include ( WindowsNSIS )
     elseif ( ${CMAKE_SYSTEM_NAME} STREQUAL "Linux" )
       include ( CPackLinuxSetup )
-      # let people know what is coming out the other end
-      message ( STATUS "CPACK_PACKAGE_FILE_NAME = ${CPACK_PACKAGE_FILE_NAME}" )
 
       # rhel requirements
       set ( CPACK_RPM_PACKAGE_REQUIRES "qt4 >= 4.2,nexus >= 4.3.1,gsl,glibc,qwtplot3d-qt4,muParser,numpy,h5py >= 2.3.1,PyCifRW >= 4.2.1,tbb,librdkafka" )
@@ -301,12 +299,12 @@ if ( ENABLE_CPACK )
       set( CPACK_RPM_PACKAGE_REQUIRES "${CPACK_RPM_PACKAGE_REQUIRES},OCE-draw,OCE-foundation,OCE-modeling,OCE-ocaf,OCE-visualization")
       set ( CPACK_RPM_PACKAGE_REQUIRES "${CPACK_RPM_PACKAGE_REQUIRES},poco-crypto,poco-data,poco-mysql,poco-sqlite,poco-odbc,poco-util,poco-xml,poco-zip,poco-net,poco-netssl,poco-foundation,PyQt4" )
       set ( CPACK_RPM_PACKAGE_REQUIRES "${CPACK_RPM_PACKAGE_REQUIRES},sip >= 4.18" )
-      set ( CPACK_RPM_PACKAGE_REQUIRES "${CPACK_RPM_PACKAGE_REQUIRES},python-six,python-ipython >= 1.1.0,python-ipython-notebook,PyYAML,python2-psutil" )
+      set ( CPACK_RPM_PACKAGE_REQUIRES "${CPACK_RPM_PACKAGE_REQUIRES},python-six,python-ipython >= 1.1.0,python-ipython-notebook,PyYAML" )
       # scipy
       set ( CPACK_RPM_PACKAGE_REQUIRES "${CPACK_RPM_PACKAGE_REQUIRES},scipy" )
       set ( CPACK_RPM_PACKAGE_REQUIRES "${CPACK_RPM_PACKAGE_REQUIRES},mxml,hdf,hdf5,jsoncpp >= 0.7.0" )
 
-      if( "${UNIX_CODENAME}" MATCHES "Santiago" )
+      if( "${UNIX_CODENAME}" MATCHES "Santiago" ) # RHEL6
         # On RHEL6 we have to use an updated qscintilla to fix an auto complete bug
         set ( CPACK_RPM_PACKAGE_REQUIRES "${CPACK_RPM_PACKAGE_REQUIRES} qscintilla >= 2.4.6, boost157,python-matplotlib" )
         # On RHEL6 we are using SCL packages for Qt
@@ -317,7 +315,7 @@ if ( ENABLE_CPACK )
       endif()
 
       # Add software collections for RHEL
-      if ( "${UNIX_CODENAME}" MATCHES "Santiago" )
+      if ( "${UNIX_CODENAME}" MATCHES "Santiago" ) # RHEL6
         set ( CPACK_RPM_PACKAGE_REQUIRES "${CPACK_RPM_PACKAGE_REQUIRES} scl-utils" )
       endif()
 
@@ -361,8 +359,7 @@ if ( ENABLE_CPACK )
                            "libhdf5-cpp-11,"
                            "python-pycifrw (>= 4.2.1),"
                            "python-yaml,"
-                           "python-qtawesome,"
-                           "python-psutil")
+                           "python-qtawesome")
         set ( PERFTOOLS_DEB_PACKAGE "libgoogle-perftools4 (>= 1.7)" )
         if( "${UNIX_CODENAME}" STREQUAL "bionic")
             list ( APPEND DEPENDS_LIST ",libgsl23,liboce-foundation11,liboce-modeling11,libqscintilla2-qt4-13,jupyter-notebook,libhdf5-cpp-100")
@@ -386,4 +383,6 @@ if ( ENABLE_CPACK )
     ENDIF ()
     # run cpack configuration
     include ( CPack )
+    # let people know what is coming out the other end - must come after include(CPack) so the RPM package file name has been generated
+    message ( STATUS "CPACK_PACKAGE_FILE_NAME = ${CPACK_PACKAGE_FILE_NAME}" )
 endif ()
diff --git a/Framework/Algorithms/inc/MantidAlgorithms/PolarizationEfficiencyCor.h b/Framework/Algorithms/inc/MantidAlgorithms/PolarizationEfficiencyCor.h
index 11f12d56958e2fa63dfb4b6af188d79acf7acbcc..16ac8440d0ff8e8de288fb624291fd2d7ba15c3a 100644
--- a/Framework/Algorithms/inc/MantidAlgorithms/PolarizationEfficiencyCor.h
+++ b/Framework/Algorithms/inc/MantidAlgorithms/PolarizationEfficiencyCor.h
@@ -39,7 +39,7 @@ public:
   const std::string name() const override;
   int version() const override;
   const std::vector<std::string> seeAlso() const override {
-    return {"PolarizationCorrection"};
+    return {"PolarizationCorrectionFredrikze"};
   }
   const std::string category() const override;
   const std::string summary() const override;
diff --git a/Framework/Algorithms/inc/MantidAlgorithms/SampleCorrections/MayersSampleCorrection.h b/Framework/Algorithms/inc/MantidAlgorithms/SampleCorrections/MayersSampleCorrection.h
index 19719ae48c055eb4b7f33bc770d7c4225719cec8..20ee9215f24c3f0332cbedd5d6bf29c4a7d55ca5 100644
--- a/Framework/Algorithms/inc/MantidAlgorithms/SampleCorrections/MayersSampleCorrection.h
+++ b/Framework/Algorithms/inc/MantidAlgorithms/SampleCorrections/MayersSampleCorrection.h
@@ -40,7 +40,7 @@ public:
   const std::string category() const override;
   const std::string summary() const override;
   const std::vector<std::string> seeAlso() const override {
-    return {"MonteCarloAbsorption", "MultipleScatteringCylinderAbsorption"};
+    return {"MonteCarloAbsorption", "CarpenterSampleCorrection"};
   }
 
 private:
diff --git a/Framework/Algorithms/src/CalculateMuonAsymmetry.cpp b/Framework/Algorithms/src/CalculateMuonAsymmetry.cpp
index e7bc6faefa7f4468186f0000d976b4169c0c55ef..37df66050bfa80315ebd2a749e8448bb95ec612a 100644
--- a/Framework/Algorithms/src/CalculateMuonAsymmetry.cpp
+++ b/Framework/Algorithms/src/CalculateMuonAsymmetry.cpp
@@ -176,6 +176,14 @@ void CalculateMuonAsymmetry::exec() {
   // get new norm
   std::vector<double> norms =
       getNormConstants(wsNamesUnNorm); // this will do the fit
+  auto containsZeros = std::any_of(norms.begin(), norms.end(),
+                                   [](double value) { return value == 0.0; });
+  if (containsZeros) {
+
+    setProperty("OutputStatus", "Aborted, a normalization constant was zero");
+    g_log.error("Got a zero for the normalization, aborting algorithm.");
+    return;
+  }
   // update the ws to new norm
   for (size_t j = 0; j < wsNames.size(); j++) {
     API::MatrixWorkspace_sptr ws =
diff --git a/Framework/Algorithms/src/PolarizationCorrectionWildes.cpp b/Framework/Algorithms/src/PolarizationCorrectionWildes.cpp
index 9e9fb51d74c413abd4c59216f54da0355ec7f76e..12ab5645fcbb0a9037080dbc2aeb4d251ef5400b 100644
--- a/Framework/Algorithms/src/PolarizationCorrectionWildes.cpp
+++ b/Framework/Algorithms/src/PolarizationCorrectionWildes.cpp
@@ -460,28 +460,30 @@ std::map<std::string, std::string>
 PolarizationCorrectionWildes::validateInputs() {
   std::map<std::string, std::string> issues;
   API::MatrixWorkspace_const_sptr factorWS = getProperty(Prop::EFFICIENCIES);
-  const auto &factorAxis = factorWS->getAxis(1);
-  if (!factorAxis) {
-    issues[Prop::EFFICIENCIES] = "The workspace is missing a vertical axis.";
-  } else if (!factorAxis->isText()) {
-    issues[Prop::EFFICIENCIES] =
-        "The vertical axis in the workspace is not text axis.";
-  } else if (factorWS->getNumberHistograms() < 4) {
-    issues[Prop::EFFICIENCIES] =
-        "The workspace should contain at least 4 histograms.";
-  } else {
-    std::vector<std::string> tags{{"P1", "P2", "F1", "F2"}};
-    for (size_t i = 0; i != factorAxis->length(); ++i) {
-      const auto label = factorAxis->label(i);
-      auto found = std::find(tags.begin(), tags.end(), label);
-      if (found != tags.cend()) {
-        std::swap(tags.back(), *found);
-        tags.pop_back();
+  if (factorWS) {
+    const auto &factorAxis = factorWS->getAxis(1);
+    if (!factorAxis) {
+      issues[Prop::EFFICIENCIES] = "The workspace is missing a vertical axis.";
+    } else if (!factorAxis->isText()) {
+      issues[Prop::EFFICIENCIES] =
+          "The vertical axis in the workspace is not text axis.";
+    } else if (factorWS->getNumberHistograms() < 4) {
+      issues[Prop::EFFICIENCIES] =
+          "The workspace should contain at least 4 histograms.";
+    } else {
+      std::vector<std::string> tags{{"P1", "P2", "F1", "F2"}};
+      for (size_t i = 0; i != factorAxis->length(); ++i) {
+        const auto label = factorAxis->label(i);
+        auto found = std::find(tags.begin(), tags.end(), label);
+        if (found != tags.cend()) {
+          std::swap(tags.back(), *found);
+          tags.pop_back();
+        }
+      }
+      if (!tags.empty()) {
+        issues[Prop::EFFICIENCIES] = "A histogram labeled " + tags.front() +
+                                     " is missing from the workspace.";
       }
-    }
-    if (!tags.empty()) {
-      issues[Prop::EFFICIENCIES] = "A histogram labeled " + tags.front() +
-                                   " is missing from the workspace.";
     }
   }
   const std::vector<std::string> inputs = getProperty(Prop::INPUT_WS);
diff --git a/Framework/CurveFitting/inc/MantidCurveFitting/Algorithms/VesuvioCalculateMS.h b/Framework/CurveFitting/inc/MantidCurveFitting/Algorithms/VesuvioCalculateMS.h
index cdd8771b4f4de5b8077a97488bbd7dc76a1f28ad..22bf0b2d2ec8d7baeca2f64d8531364a391c804d 100644
--- a/Framework/CurveFitting/inc/MantidCurveFitting/Algorithms/VesuvioCalculateMS.h
+++ b/Framework/CurveFitting/inc/MantidCurveFitting/Algorithms/VesuvioCalculateMS.h
@@ -89,7 +89,7 @@ public:
 
   const std::vector<std::string> seeAlso() const override {
     return {"MayersSampleCorrection", "MonteCarloAbsorption",
-            "MultipleScatteringCylinderAbsorption"};
+            "CarpenterSampleCorrection"};
   }
 
 private:
diff --git a/Framework/DataHandling/src/ExtractPolarizationEfficiencies.cpp b/Framework/DataHandling/src/ExtractPolarizationEfficiencies.cpp
index b7badbe04f345a63e63e49021053742fd14a9fcd..3c16eed064a6401ec77285208155d6bf2a653032 100644
--- a/Framework/DataHandling/src/ExtractPolarizationEfficiencies.cpp
+++ b/Framework/DataHandling/src/ExtractPolarizationEfficiencies.cpp
@@ -48,12 +48,14 @@ std::vector<double> parseVector(std::string const &name,
   return result;
 }
 
-MatrixWorkspace_sptr createWorkspace(std::vector<double> const &x,
-                                     std::vector<double> const &y) {
+MatrixWorkspace_sptr
+createWorkspace(std::vector<double> const &x, std::vector<double> const &y,
+                std::vector<double> const &e = std::vector<double>()) {
   Points xVals(x);
   Counts yVals(y);
+  CountStandardDeviations eVals(e.empty() ? std::vector<double>(y.size()) : e);
   auto retVal = boost::make_shared<Workspace2D>();
-  retVal->initialize(1, Histogram(xVals, yVals));
+  retVal->initialize(1, Histogram(xVals, yVals, eVals));
   return retVal;
 }
 
@@ -126,7 +128,7 @@ void ExtractPolarizationEfficiencies::exec() {
   auto alg = createChildAlgorithm("JoinISISPolarizationEfficiencies");
   auto const &efficiencies = EFFICIENCIES.at(method);
   for (auto const &name : efficiencies) {
-    auto const propValue = instrument->getParameterAsString(name);
+    auto propValue = instrument->getParameterAsString(name);
     if (propValue.empty()) {
       throw std::invalid_argument("Parameter " + name +
                                   " is missing from the correction parameters");
@@ -134,12 +136,17 @@ void ExtractPolarizationEfficiencies::exec() {
     auto const prop = parseVector(name, propValue);
     if (lambda.size() != prop.size()) {
       throw std::runtime_error("Instrument vector parameter \"" + name +
-                               "\" is expeced to be the same size as \"" +
+                               "\" is expected to be the same size as \"" +
                                LAMBDA_PARAMETER + "\" but " +
                                std::to_string(prop.size()) + " != " +
                                std::to_string(lambda.size()));
     }
-    auto ws = createWorkspace(lambda, prop);
+    auto const errorName = name + "_Errors";
+    propValue = instrument->getParameterAsString(errorName);
+    auto const errorProp = propValue.empty()
+                               ? std::vector<double>()
+                               : parseVector(errorName, propValue);
+    auto ws = createWorkspace(lambda, prop, errorProp);
     alg->setProperty(name, ws);
   }
   alg->execute();
diff --git a/Framework/DataHandling/test/ExtractPolarizationEfficienciesTest.h b/Framework/DataHandling/test/ExtractPolarizationEfficienciesTest.h
index b2666930a36c5725ec08ebd50c35bd98a92a2ac1..8baaf227f53e836fc1afd18a51a584e9981fc9c8 100644
--- a/Framework/DataHandling/test/ExtractPolarizationEfficienciesTest.h
+++ b/Framework/DataHandling/test/ExtractPolarizationEfficienciesTest.h
@@ -210,20 +210,40 @@ public:
     TS_ASSERT_DELTA(outWS->y(0)[2], 0.993, 1e-14);
     TS_ASSERT_DELTA(outWS->y(0)[3], 0.994, 1e-14);
 
+    TS_ASSERT_DELTA(outWS->e(0)[0], 0.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(0)[1], 0.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(0)[2], 0.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(0)[3], 0.0, 1e-14);
+
     TS_ASSERT_DELTA(outWS->y(1)[0], 0.981, 1e-14);
     TS_ASSERT_DELTA(outWS->y(1)[1], 0.982, 1e-14);
     TS_ASSERT_DELTA(outWS->y(1)[2], 0.983, 1e-14);
     TS_ASSERT_DELTA(outWS->y(1)[3], 0.984, 1e-14);
 
+    TS_ASSERT_DELTA(outWS->e(1)[0], 0.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(1)[1], 0.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(1)[2], 0.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(1)[3], 0.0, 1e-14);
+
     TS_ASSERT_DELTA(outWS->y(2)[0], 0.971, 1e-14);
     TS_ASSERT_DELTA(outWS->y(2)[1], 0.972, 1e-14);
     TS_ASSERT_DELTA(outWS->y(2)[2], 0.973, 1e-14);
     TS_ASSERT_DELTA(outWS->y(2)[3], 0.974, 1e-14);
 
+    TS_ASSERT_DELTA(outWS->e(2)[0], 0.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(2)[1], 0.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(2)[2], 0.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(2)[3], 0.0, 1e-14);
+
     TS_ASSERT_DELTA(outWS->y(3)[0], 0.961, 1e-14);
     TS_ASSERT_DELTA(outWS->y(3)[1], 0.962, 1e-14);
     TS_ASSERT_DELTA(outWS->y(3)[2], 0.963, 1e-14);
     TS_ASSERT_DELTA(outWS->y(3)[3], 0.964, 1e-14);
+
+    TS_ASSERT_DELTA(outWS->e(3)[0], 0.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(3)[1], 0.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(3)[2], 0.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(3)[3], 0.0, 1e-14);
   }
 
   void test_Wildes() {
@@ -261,20 +281,111 @@ public:
     TS_ASSERT_DELTA(outWS->y(0)[2], 0.993, 1e-14);
     TS_ASSERT_DELTA(outWS->y(0)[3], 0.994, 1e-14);
 
+    TS_ASSERT_DELTA(outWS->e(0)[0], 0.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(0)[1], 0.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(0)[2], 0.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(0)[3], 0.0, 1e-14);
+
+    TS_ASSERT_DELTA(outWS->y(1)[0], 0.981, 1e-14);
+    TS_ASSERT_DELTA(outWS->y(1)[1], 0.982, 1e-14);
+    TS_ASSERT_DELTA(outWS->y(1)[2], 0.983, 1e-14);
+    TS_ASSERT_DELTA(outWS->y(1)[3], 0.984, 1e-14);
+
+    TS_ASSERT_DELTA(outWS->e(1)[0], 0.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(1)[1], 0.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(1)[2], 0.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(1)[3], 0.0, 1e-14);
+
+    TS_ASSERT_DELTA(outWS->y(2)[0], 0.971, 1e-14);
+    TS_ASSERT_DELTA(outWS->y(2)[1], 0.972, 1e-14);
+    TS_ASSERT_DELTA(outWS->y(2)[2], 0.973, 1e-14);
+    TS_ASSERT_DELTA(outWS->y(2)[3], 0.974, 1e-14);
+
+    TS_ASSERT_DELTA(outWS->e(2)[0], 0.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(2)[1], 0.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(2)[2], 0.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(2)[3], 0.0, 1e-14);
+
+    TS_ASSERT_DELTA(outWS->y(3)[0], 0.961, 1e-14);
+    TS_ASSERT_DELTA(outWS->y(3)[1], 0.962, 1e-14);
+    TS_ASSERT_DELTA(outWS->y(3)[2], 0.963, 1e-14);
+    TS_ASSERT_DELTA(outWS->y(3)[3], 0.964, 1e-14);
+
+    TS_ASSERT_DELTA(outWS->e(3)[0], 0.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(3)[1], 0.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(3)[2], 0.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(3)[3], 0.0, 1e-14);
+  }
+
+  void test_Wildes_errors() {
+    auto workspace = createInputWorkspace("Wildes", "1 2 3 4", false, true);
+
+    ExtractPolarizationEfficiencies alg;
+    alg.initialize();
+    alg.setChild(true);
+    alg.setRethrows(true);
+    alg.setProperty("InputWorkspace", workspace);
+    alg.setProperty("OutputWorkspace", "dummy");
+    alg.execute();
+    MatrixWorkspace_sptr outWS = alg.getProperty("OutputWorkspace");
+
+    TS_ASSERT(outWS);
+    TS_ASSERT_EQUALS(outWS->getNumberHistograms(), 4);
+    TS_ASSERT_EQUALS(outWS->blocksize(), 4);
+    TS_ASSERT_EQUALS(outWS->getAxis(0)->unit()->caption(), "Wavelength");
+
+    auto axis1 = outWS->getAxis(1);
+    TS_ASSERT_EQUALS(axis1->label(0), "P1");
+    TS_ASSERT_EQUALS(axis1->label(1), "P2");
+    TS_ASSERT_EQUALS(axis1->label(2), "F1");
+    TS_ASSERT_EQUALS(axis1->label(3), "F2");
+
+    TS_ASSERT(!outWS->isHistogramData());
+
+    TS_ASSERT_DELTA(outWS->x(0)[0], 1.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->x(0)[1], 2.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->x(0)[2], 3.0, 1e-14);
+    TS_ASSERT_DELTA(outWS->x(0)[3], 4.0, 1e-14);
+
+    TS_ASSERT_DELTA(outWS->y(0)[0], 0.991, 1e-14);
+    TS_ASSERT_DELTA(outWS->y(0)[1], 0.992, 1e-14);
+    TS_ASSERT_DELTA(outWS->y(0)[2], 0.993, 1e-14);
+    TS_ASSERT_DELTA(outWS->y(0)[3], 0.994, 1e-14);
+
+    TS_ASSERT_DELTA(outWS->e(0)[0], 0.1, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(0)[1], 0.2, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(0)[2], 0.3, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(0)[3], 0.4, 1e-14);
+
     TS_ASSERT_DELTA(outWS->y(1)[0], 0.981, 1e-14);
     TS_ASSERT_DELTA(outWS->y(1)[1], 0.982, 1e-14);
     TS_ASSERT_DELTA(outWS->y(1)[2], 0.983, 1e-14);
     TS_ASSERT_DELTA(outWS->y(1)[3], 0.984, 1e-14);
 
+    TS_ASSERT_DELTA(outWS->e(1)[0], 0.11, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(1)[1], 0.21, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(1)[2], 0.31, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(1)[3], 0.41, 1e-14);
+
     TS_ASSERT_DELTA(outWS->y(2)[0], 0.971, 1e-14);
     TS_ASSERT_DELTA(outWS->y(2)[1], 0.972, 1e-14);
     TS_ASSERT_DELTA(outWS->y(2)[2], 0.973, 1e-14);
     TS_ASSERT_DELTA(outWS->y(2)[3], 0.974, 1e-14);
 
+    TS_ASSERT_DELTA(outWS->e(2)[0], 0.12, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(2)[1], 0.22, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(2)[2], 0.32, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(2)[3], 0.42, 1e-14);
+
     TS_ASSERT_DELTA(outWS->y(3)[0], 0.961, 1e-14);
     TS_ASSERT_DELTA(outWS->y(3)[1], 0.962, 1e-14);
     TS_ASSERT_DELTA(outWS->y(3)[2], 0.963, 1e-14);
     TS_ASSERT_DELTA(outWS->y(3)[3], 0.964, 1e-14);
+
+    TS_ASSERT_DELTA(outWS->e(3)[0], 0.13, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(3)[1], 0.23, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(3)[2], 0.33, 1e-14);
+    TS_ASSERT_DELTA(outWS->e(3)[3], 0.43, 1e-14);
   }
 
   void test_loading_from_file() {
@@ -392,7 +503,7 @@ public:
     alg.setProperty("OutputWorkspace", "dummy");
     TS_ASSERT_THROWS_EQUALS(
         alg.execute(), std::runtime_error & e, std::string(e.what()),
-        "Instrument vector parameter \"P1\" is expeced to be the same size as "
+        "Instrument vector parameter \"P1\" is expected to be the same size as "
         "\"efficiency_lambda\" but 4 != 3");
   }
 
@@ -410,7 +521,7 @@ private:
   MatrixWorkspace_sptr
   createInputWorkspace(std::string const &method,
                        std::string const &lambda = "1 2 3 4",
-                       bool skipP1 = false) {
+                       bool skipP1 = false, bool loadErrors = false) {
     auto workspace = createPointWS(1, 0, 10);
     auto pmap = boost::make_shared<ParameterMap>();
     auto instrument = boost::make_shared<Instrument>();
@@ -430,10 +541,18 @@ private:
                       "00,01,10,11");
       if (!skipP1) {
         pmap->addString(instrument.get(), "P1", "0.991 0.992 0.993 0.994");
+        if (loadErrors) {
+          pmap->addString(instrument.get(), "P1_Errors", "0.1 0.2 0.3 0.4");
+        }
       }
       pmap->addString(instrument.get(), "P2", "0.981 0.982 0.983 0.984");
       pmap->addString(instrument.get(), "F1", "0.971 0.972 0.973 0.974");
       pmap->addString(instrument.get(), "F2", "0.961 0.962 0.963 0.964");
+      if (loadErrors) {
+        pmap->addString(instrument.get(), "P2_Errors", "0.11 0.21 0.31 0.41");
+        pmap->addString(instrument.get(), "F1_Errors", "0.12 0.22 0.32 0.42");
+        pmap->addString(instrument.get(), "F2_Errors", "0.13 0.23 0.33 0.43");
+      }
     }
 
     instrument = boost::make_shared<Instrument>(instrument, pmap);
diff --git a/Framework/Muon/inc/MantidMuon/ApplyMuonDetectorGroupPairing.h b/Framework/Muon/inc/MantidMuon/ApplyMuonDetectorGroupPairing.h
index 4be08406b1ce2e614a6c215c0aac7c6b6174056f..ad888295791aa32258af724721d685184777c0a8 100644
--- a/Framework/Muon/inc/MantidMuon/ApplyMuonDetectorGroupPairing.h
+++ b/Framework/Muon/inc/MantidMuon/ApplyMuonDetectorGroupPairing.h
@@ -56,8 +56,7 @@ public:
   }
   /// See also
   const std::vector<std::string> seeAlso() const override {
-    return {"MuonProcess", "ApplyMuonDetectorGrouping",
-            "LoadAndApplyMuonDetectorGrouping"};
+    return {"MuonProcess", "ApplyMuonDetectorGrouping"};
   }
   /// Perform validation of inputs to the algorithm
   std::map<std::string, std::string> validateInputs() override;
diff --git a/Framework/Muon/src/ConvertFitFunctionForMuonTFAsymmetry.cpp b/Framework/Muon/src/ConvertFitFunctionForMuonTFAsymmetry.cpp
index 5493be007eaa75854550154eedc89cf21563844e..292bd536abb3c0624290e3ce7ea8730042a2615f 100644
--- a/Framework/Muon/src/ConvertFitFunctionForMuonTFAsymmetry.cpp
+++ b/Framework/Muon/src/ConvertFitFunctionForMuonTFAsymmetry.cpp
@@ -248,15 +248,23 @@ IFunction_sptr ConvertFitFunctionForMuonTFAsymmetry::extractUserFunction(
     const IFunction_sptr &TFFuncIn) {
   // N(1+g) + exp
   auto TFFunc = boost::dynamic_pointer_cast<CompositeFunction>(TFFuncIn);
-
+  if (TFFunc == nullptr) {
+    throw std::runtime_error("Input function is not of the correct form");
+  }
   // getFunction(0) -> N(1+g)
 
   TFFunc =
       boost::dynamic_pointer_cast<CompositeFunction>(TFFunc->getFunction(0));
+  if (TFFunc == nullptr) {
+    throw std::runtime_error("Input function is not of the correct form");
+  }
   // getFunction(1) -> 1+g
 
   TFFunc =
       boost::dynamic_pointer_cast<CompositeFunction>(TFFunc->getFunction(1));
+  if (TFFunc == nullptr) {
+    throw std::runtime_error("Input function is not of the correct form");
+  }
   // getFunction(1) -> g
   return TFFunc->getFunction(1);
 }
diff --git a/Framework/PythonInterface/mantid/plots/helperfunctions.py b/Framework/PythonInterface/mantid/plots/helperfunctions.py
index e418f85d238be750b55e9fbc11803beb8d9522fa..d2938857600d9fb7c95e52de0edcfe3aa9ef5a19 100644
--- a/Framework/PythonInterface/mantid/plots/helperfunctions.py
+++ b/Framework/PythonInterface/mantid/plots/helperfunctions.py
@@ -42,6 +42,7 @@ def get_distribution(workspace, **kwargs):
     """
     Determine whether or not the data is a distribution. The value in
     the kwargs wins. Applies to Matrix workspaces only
+
     :param workspace: :class:`mantid.api.MatrixWorkspace` to extract the data from
     """
     distribution = kwargs.pop('distribution', workspace.isDistribution())
@@ -53,6 +54,7 @@ def get_normalization(md_workspace, **kwargs):
     Gets the normalization flag of an MDHistoWorkspace. For workspaces
     derived similar to MSlice/Horace, one needs to average data, the so-called
     "number of events" normalization.
+
     :param md_workspace: :class:`mantid.api.IMDHistoWorkspace` to extract the data from
     """
     normalization = kwargs.pop('normalization', md_workspace.displayNormalizationHisto())
@@ -62,6 +64,7 @@ def get_normalization(md_workspace, **kwargs):
 def points_from_boundaries(input_array):
     """
     The function returns bin centers from bin boundaries
+
     :param input_array: a :class:`numpy.ndarray` of bin boundaries
     """
     assert isinstance(input_array, numpy.ndarray), 'Not a numpy array'
@@ -73,6 +76,7 @@ def points_from_boundaries(input_array):
 def _dim2array(d):
     """
     Create a numpy array containing bin centers along the dimension d
+
     :param d: an :class:`mantid.geometry.IMDDimension` object
 
     returns: bin boundaries for dimension d
@@ -138,12 +142,14 @@ def get_md_data1d(workspace, normalization):
 def get_md_data(workspace, normalization, withError=False):
     """
     Generic function to extract data from an MDHisto workspace
+
     :param workspace: :class:`mantid.api.IMDHistoWorkspace` containing data
     :param normalization: if :class:`mantid.api.MDNormalization.NumEventsNormalization`
         it will divide intensity by the number of corresponding MDEvents
+    :param withError: flag for if the error is calculated. If False, err is returned as None
+
     returns a tuple containing bin boundaries for each dimension, the (maybe normalized)
     signal and error arrays
-    :param withError: flag for if the error is calculated. If False, err is returned as None
     """
     dims = workspace.getNonIntegratedDimensions()
     dim_arrays = [_dim2array(d) for d in dims]
@@ -169,6 +175,7 @@ def get_md_data(workspace, normalization, withError=False):
 def get_spectrum(workspace, wkspIndex, distribution, withDy=False, withDx=False):
     """
     Extract a single spectrum and process the data into a frequency
+
     :param workspace: a Workspace2D or an EventWorkspace
     :param wkspIndex: workspace index
     :param distribution: flag to divide the data by bin width. It happens only
@@ -176,9 +183,11 @@ def get_spectrum(workspace, wkspIndex, distribution, withDy=False, withDx=False)
         the mantid configuration is set up to divide such workspaces by bin
         width. The same effect can be obtained by running the
         :ref:`algm-ConvertToDistribution` algorithm
+
     :param withDy: if True, it will return the error in the "counts", otherwise None
     :param with Dx: if True, and workspace has them, it will return errors
         in the x coordinate, otherwise None
+
     Note that for workspaces containing bin boundaries, this function will return
     the bin centers for x.
     To be used in 1D plots (plot, scatter, errorbar)
@@ -210,7 +219,8 @@ def get_md_data2d_bin_bounds(workspace, normalization):
     Function to transform data in an MDHisto workspace with exactly
     two non-integrated dimension into arrays of bin boundaries in each
     dimension, and data. To be used in 2D plots (pcolor, pcolorfast, pcolormesh)
-    Note return coordinates are 1d vectors. Use numpy.meshgrid to generate 2d versions
+
+    Note: return coordinates are 1d vectors. Use numpy.meshgrid to generate 2d versions
     """
     coordinate, data, _ = get_md_data(workspace, normalization, withError=False)
     assert len(coordinate) == 2, 'The workspace is not 2D'
@@ -223,7 +233,8 @@ def get_md_data2d_bin_centers(workspace, normalization):
     two non-integrated dimension into arrays of bin centers in each
     dimension, and data. To be used in 2D plots (contour, contourf,
     tricontour, tricontourf, tripcolor)
-    Note return coordinates are 1d vectors. Use numpy.meshgrid to generate 2d versions
+
+    Note: return coordinates are 1d vectors. Use numpy.meshgrid to generate 2d versions
     """
     x, y, data = get_md_data2d_bin_bounds(workspace, normalization)
     x = points_from_boundaries(x)
@@ -234,6 +245,7 @@ def get_md_data2d_bin_centers(workspace, normalization):
 def boundaries_from_points(input_array):
     """"
     The function tries to guess bin boundaries from bin centers
+
     :param input_array: a :class:`numpy.ndarray` of bin centers
     """
     assert isinstance(input_array, numpy.ndarray), 'Not a numpy array'
@@ -257,12 +269,15 @@ def get_matrix_2d_data(workspace, distribution, histogram2D=False):
     '''
     Get all data from a Matrix workspace that has the same number of bins
     in every spectrum. It is used for 2D plots
+
     :param workspace: Matrix workspace to extract the data from
     :param distribution: if False, and the workspace contains histogram data,
         the intensity will be divided by the x bin width
+
     :param histogram2D: flag that specifies if the coordinates in the output are
         -bin centers (such as for contour) for False, or
         -bin edges (such as for pcolor) for True.
+
     Returns x,y,z 2D arrays
     '''
     try:
@@ -305,9 +320,11 @@ def get_uneven_data(workspace, distribution):
     '''
     Function to get data for uneven workspace2Ds, such as
     that pcolor, pcolorfast, and pcolormesh will plot axis aligned rectangles
+
     :param workspace: a workspace2d
     :param distribution: if False, and the workspace contains histogram data,
         the intensity will be divided by the x bin width
+
     Returns three lists. Each element in the x list is an array of boundaries
     for a spectra. Each element in the y list is a 2 element array with the extents
     of a particular spectra. The z list contains arrays of intensities at bin centers
@@ -340,7 +357,9 @@ def get_data_uneven_flag(workspace, **kwargs):
     :meth:`matplotlib.axes.Axes.pcolorfast`, and :meth:`matplotlib.axes.Axes.pcolormesh`
     to plot rectangles parallel to the axes even if the data is not
     on a regular grid.
+
     :param workspace: a workspace2d
+
     if axisaligned keyword is available and True or if the workspace does
     not have a constant number of bins, it will return true, otherwise false
     '''
@@ -396,6 +415,7 @@ def get_axes_labels(workspace):
     Returns a tuple. The first element is the quantity label, such as "Intensity" or "Counts".
     All other elements in the tuple are labels for axes.
     Some of them are latex formatted already.
+
     :param workspace: :class:`mantid.api.MatrixWorkspace` or :class:`mantid.api.IMDHistoWorkspace`
     """
     if isinstance(workspace, MDHistoWorkspace):
diff --git a/Framework/PythonInterface/plugins/algorithms/ConvertWANDSCDtoQ.py b/Framework/PythonInterface/plugins/algorithms/ConvertWANDSCDtoQ.py
index 42a5df51e70a544c00209d86462da58095c6c957..12580851ccaa1e799f64f989d9337042236dab92 100644
--- a/Framework/PythonInterface/plugins/algorithms/ConvertWANDSCDtoQ.py
+++ b/Framework/PythonInterface/plugins/algorithms/ConvertWANDSCDtoQ.py
@@ -290,7 +290,9 @@ class ConvertWANDSCDtoQ(PythonAlgorithm):
             createWS_alg.execute()
             mtd.addOrReplace(self.getPropertyValue("OutputWorkspace")+'_normalization', createWS_alg.getProperty("OutputWorkspace").value)
 
-        output /= output_norm
+        old_settings = np.seterr(divide='ignore', invalid='ignore') # Ignore RuntimeWarning: invalid value encountered in true_divide
+        output /= output_norm # We often divide by zero here and get NaNs; this is the desired behaviour
+        np.seterr(**old_settings)
 
         progress.report('Creating MDHistoWorkspace')
         createWS_alg = self.createChildAlgorithm("CreateMDHistoWorkspace", enableLogging=False)
diff --git a/MantidPlot/CMakeLists.txt b/MantidPlot/CMakeLists.txt
index b5b9670e1c502cfef2537ab1deba3d5c29ab58ef..f63b812a7c48edd9934aaccd95c9189795b4eec3 100644
--- a/MantidPlot/CMakeLists.txt
+++ b/MantidPlot/CMakeLists.txt
@@ -814,6 +814,10 @@ target_link_libraries ( MantidPlot LINK_PRIVATE
   ${OPENGL_glu_LIBRARY}
   ${OPENGL_gl_LIBRARY}
 )
+if (WIN32)
+  set_target_properties ( MantidPlot PROPERTIES COMPILE_DEFINITIONS "PSAPI_VERSION=1" )
+  target_link_libraries ( MantidPlot PRIVATE Psapi.lib )
+endif ()
 
 if(MAKE_VATES)
   target_include_directories( MantidPlot SYSTEM PRIVATE ${PARAVIEW_INCLUDE_DIRS} )
diff --git a/MantidPlot/make_package.rb.in b/MantidPlot/make_package.rb.in
index 141c681498d653d20958c0d303b9bd8437c2dc35..c19e0b216c217df5f7e9661f607c0f7a7ac21d7c 100755
--- a/MantidPlot/make_package.rb.in
+++ b/MantidPlot/make_package.rb.in
@@ -325,7 +325,7 @@ end
 #currently missing epics
 path = "/Library/Python/2.7/site-packages"
 directories = ["sphinx","sphinx_bootstrap_theme","IPython","zmq","pygments","backports", "qtawesome", "qtpy",
-               "certifi","tornado","markupsafe","jinja2","psutil","jsonschema","functools32","ptyprocess","CifFile","yaml"]
+               "certifi","tornado","markupsafe","jinja2","jsonschema","functools32","ptyprocess","CifFile","yaml"]
 directories.each do |directory|
   addPythonLibrary("#{path}/#{directory}","Contents/MacOS/")
 end
diff --git a/MantidPlot/src/ApplicationWindow.cpp b/MantidPlot/src/ApplicationWindow.cpp
index 44d7eae1edbf1091bdcc14113d3b4bb56f0568bc..8ad7f5a89f3b8777b81fb93a29f2cfe2f4ac87cd 100644
--- a/MantidPlot/src/ApplicationWindow.cpp
+++ b/MantidPlot/src/ApplicationWindow.cpp
@@ -15964,6 +15964,8 @@ void ApplicationWindow::customMultilayerToolButtons(MultiLayer *w) {
     return;
   }
 
+  btnMultiPeakPick->setEnabled(w->layers() == 1);
+
   Graph *g = w->activeGraph();
   if (g) {
     PlotToolInterface *tool = g->activeTool();
@@ -16644,6 +16646,7 @@ void ApplicationWindow::onAboutToStart() {
   // instance currently running
   try {
     if (!Process::isAnotherInstanceRunning()) {
+      g_log.debug("Starting project autosaving.");
       checkForProjectRecovery();
     } else {
       g_log.debug("Another MantidPlot process is running. Project recovery is "
diff --git a/MantidPlot/src/Mantid/MantidUI.cpp b/MantidPlot/src/Mantid/MantidUI.cpp
index a67ed40590ca12365844b06f30f9ab1317ab21da..f6fc450f085c72a4f4579bf2b31fcc5f8889c710 100644
--- a/MantidPlot/src/Mantid/MantidUI.cpp
+++ b/MantidPlot/src/Mantid/MantidUI.cpp
@@ -1270,6 +1270,11 @@ Table *MantidUI::createDetectorTable(
     const std::vector<int> &indices, bool include_data) {
   using namespace Mantid::Geometry;
 
+  IComponent_const_sptr sample = ws->getInstrument()->getSample();
+  if (!sample) {
+    return nullptr;
+  }
+
   // check if efixed value is available
   bool calcQ(true);
 
@@ -1330,7 +1335,6 @@ Table *MantidUI::createDetectorTable(
   t->setTextFormat(ncols - 1);
 
   // Cache some frequently used values
-  IComponent_const_sptr sample = ws->getInstrument()->getSample();
   const auto beamAxisIndex =
       ws->getInstrument()->getReferenceFrame()->pointingAlongBeam();
   const auto sampleDist = sample->getPos()[beamAxisIndex];
diff --git a/MantidPlot/src/Process.cpp b/MantidPlot/src/Process.cpp
index 20fc6d3ead9885a397efa48c784093a72ef7f3b9..b9dc2d4aec4a298a9c6da4f8b3951ba1745de8b7 100644
--- a/MantidPlot/src/Process.cpp
+++ b/MantidPlot/src/Process.cpp
@@ -1,85 +1,35 @@
-// clang-format off
-#include "MantidQtWidgets/Common/PythonThreading.h"
-// clang-format on
-
 #include "Process.h"
 
-#include <iostream>
-#include <stdexcept>
 #include <QCoreApplication>
+#include <QFileInfo>
+
+#if defined(Q_OS_LINUX)
+#include <QDir>
+#include <QFile>
+#elif defined(Q_OS_WIN)
+#define WIN32_LEAN_AND_MEAN
+#include <Psapi.h>
+#include "MantidQtWidgets/Common/QStringUtils.h"
+#elif defined(Q_OS_MAC)
+#include <libproc.h>
+#include <sys/sysctl.h>
+#endif
 
 namespace {
 
-class PyObjectNewReference {
-public:
-  explicit PyObjectNewReference(PyObject *object) : m_object(object) {}
-  ~PyObjectNewReference() { Py_XDECREF(m_object); }
-
-  PyObjectNewReference(const PyObjectNewReference &) = delete;
-  PyObjectNewReference &operator=(const PyObjectNewReference &) = delete;
-
-  PyObjectNewReference(PyObjectNewReference &&o) { *this = std::move(o); }
-
-  PyObjectNewReference &operator=(PyObjectNewReference &&other) {
-    this->m_object = other.m_object;
-    other.m_object = nullptr;
-    return *this;
-  }
-
-  inline PyObject *ptr() const { return m_object; }
-
-private:
-  PyObject *m_object;
-};
-
-/**
- * @brief Retrieve a named attribute
- * @param source The source object
- * @param name The name of the attribute
- * @return The attribute
- * @throws std::runtime_error if an error occurs retrieving the attribute
- */
-PyObjectNewReference attr(PyObject *source, const char *name) {
-  PyObjectNewReference attr(PyObject_GetAttrString(source, name));
-  if (attr.ptr()) {
-    return attr;
-  } else {
-    PyErr_Print();
-    throw std::runtime_error(std::string("Process: No attribute ") + name +
-                             " found");
-  }
-}
-
-/**
- * @brief Call a named function with an check for errors
- * @param source The source object
- * @param name The name of the attribute to call
- * @return The return value of the function
- * @throws std::runtime_error if an error occurs retrieving the attribute
- */
-PyObjectNewReference call(PyObject *source, const char *name) {
-  auto returnedAttr = attr(source, name);
-  auto result = PyObject_CallFunction(returnedAttr.ptr(), nullptr);
-  if (result)
-    return PyObjectNewReference(result);
-  else {
-    PyErr_Print();
-    throw std::runtime_error(std::string("Process: Error calling function ") +
-                             name);
-  }
+bool isOtherInstance(int64_t otherPID, QString otherExeName) {
+  static const int64_t ourPID(QCoreApplication::applicationPid());
+  if (otherPID == ourPID)
+    return false;
+
+  static const QString ourExeName(
+      QFileInfo(QCoreApplication::applicationFilePath()).fileName());
+  if (ourExeName == otherExeName)
+    return true;
+  else
+    return false;
 }
 
-/**
- * @return Return a pointer to the psutil module. A new reference is returned.
- */
-PyObjectNewReference psutil() {
-  if (auto process = PyImport_ImportModule("psutil")) {
-    return PyObjectNewReference(process);
-  } else {
-    PyErr_Clear();
-    throw std::runtime_error("Python module psutil cannot be imported.");
-  }
-}
 } // namespace
 
 namespace Process {
@@ -88,38 +38,148 @@ namespace Process {
   * Returns true is another instance of Mantid is running
   * on this machine
   * @return True if another instance is running
-  * @throws std::runtime_error if the PID list cannot be determined
+  * @throws std::runtime_error if this cannot be determined
   */
-bool isAnotherInstanceRunning() { return !otherInstancePIDs().empty(); }
+#ifdef Q_OS_LINUX
+bool isAnotherInstanceRunning() {
+  // Inspired by psutil._pslinux.Process.exe:
+  // https://github.com/giampaolo/psutil/blob/master/psutil/_pslinux.py
+  QDir procfs{"/proc"};
+
+  bool otherIsRunning(false);
+  const QStringList entries{procfs.entryList(QDir::Dirs)};
+  for (const auto &pidStr : entries) {
+    bool isDigit(false);
+    const long long pid{pidStr.toLongLong(&isDigit)};
+    if (!isDigit)
+      continue;
+
+    // /proc/pid/exe should point to executable
+    QFileInfo exe{"/proc/" + pidStr + "/exe"};
+    if (!exe.exists() || !exe.isSymLink())
+      continue;
+
+    if (isOtherInstance(pid, QFileInfo(exe.symLinkTarget()).fileName())) {
+      otherIsRunning = true;
+      break;
+    }
+  }
+  return otherIsRunning;
+}
+#elif defined(Q_OS_WIN)
+bool isAnotherInstanceRunning() {
+  using MantidQt::API::toQStringInternal;
+  // Inspired by psutil.psutil_get_pids at
+  // https://github.com/giampaolo/psutil/blob/master/psutil/arch/windows/process_info.c
+
+  // EnumProcesses in Win32 SDK says the only way to know if our process array
+  // wasn't large enough is to check the returned size and make
+  // sure that it doesn't match the size of the array.
+  // If it does we allocate a larger array and try again
+
+  std::vector<DWORD> processes;
+  // Stores the byte size of the returned array from EnumProcesses
+  DWORD enumReturnSz{0};
+  do {
+    processes.resize(processes.size() + 1024);
+    const DWORD procArrayByteSz =
+        static_cast<DWORD>(processes.size()) * sizeof(DWORD);
+    if (!EnumProcesses(processes.data(), procArrayByteSz, &enumReturnSz)) {
+      throw std::runtime_error("Unable to determine running process list");
+    }
+  } while (enumReturnSz == processes.size() * sizeof(DWORD));
+  // Set the vector back to the appropriate size
+  processes.resize(enumReturnSz / sizeof(DWORD));
+
+  bool otherIsRunning(false);
+  wchar_t exe[MAX_PATH];
+  for (const auto pid : processes) {
+    // system-idle process
+    if (pid == 0)
+      continue;
+    auto procHandle = OpenProcess(PROCESS_QUERY_INFORMATION, FALSE, pid);
+    if (!procHandle)
+      continue;
+    DWORD exeSz = GetProcessImageFileNameW(procHandle, exe, MAX_PATH);
+    CloseHandle(procHandle);
+    if (exeSz > 0 &&
+        isOtherInstance(pid, QFileInfo(toQStringInternal(exe)).fileName())) {
+      otherIsRunning = true;
+      break;
+    }
+  }
+  return otherIsRunning;
+}
 
-/**
- * @brief Return a list of process IDs for other instances of this process.
- * @return A list of other processes running. The PID for this process is
- * removed from the list. An empty list is returned
- * if no other processes are running.
- * @throws std::runtime_error if the PID list cannot be determined
- */
-std::vector<int64_t> otherInstancePIDs() {
-  ScopedPythonGIL lock;
-  const int64_t ourPID(QCoreApplication::applicationPid());
-  const PyObjectNewReference ourName(
-      FROM_CSTRING(QCoreApplication::applicationName().toLatin1().data()));
-  auto psutilModule(psutil());
-  auto processIter(call(psutilModule.ptr(), "process_iter"));
-
-  std::vector<int64_t> otherPIDs;
-  PyObject *item(nullptr);
-  while ((item = PyIter_Next(processIter.ptr()))) {
-    auto name = call(item, "name");
-    if (PyObject_RichCompareBool(name.ptr(), ourName.ptr(), Py_EQ)) {
-      auto pid = PyLong_AsLong(attr(item, "pid").ptr());
-      if (pid != ourPID) {
-        otherPIDs.emplace_back(pid);
-      }
+#elif defined(Q_OS_MAC)
+bool isAnotherInstanceRunning() {
+  kinfo_proc *processes[] = {nullptr};
+  size_t processesLength(0);
+  int sysctlQuery[3] = {CTL_KERN, KERN_PROC, KERN_PROC_ALL};
+  /*
+   * We start by calling sysctl with ptr == NULL and size == 0.
+   * That will succeed, and set size to the appropriate length.
+   * We then allocate a buffer of at least that size and call
+   * sysctl with that buffer.  If that succeeds, we're done.
+   * If that call fails with ENOMEM, we throw the buffer away
+   * and try again.
+   * Note that the loop calls sysctl with NULL again.  This is
+   * necessary because the ENOMEM failure case sets size to
+   * the amount of data returned, not the amount of data that
+   * could have been returned.
+   */
+  int attempts = 8; // An arbitrary number of attempts to try
+  void *memory{nullptr};
+  while (attempts-- > 0) {
+    size_t size = 0;
+    if (sysctl((int *)sysctlQuery, 3, NULL, &size, NULL, 0) == -1) {
+      throw std::runtime_error("Unable to retrieve process list");
+    }
+    const size_t size2 =
+        size + (size >> 3); // add some headroom in case more processes appear
+    if (size2 > size) {
+      memory = malloc(size2);
+      if (memory == nullptr)
+        memory = malloc(size);
+      else
+        size = size2;
+    } else {
+      memory = malloc(size);
+    }
+    if (memory == nullptr)
+      throw std::runtime_error(
+          "Unable to allocate memory to retrieve process list");
+    if (sysctl((int *)sysctlQuery, 3, memory, &size, NULL, 0) == -1) {
+      free(memory);
+      throw std::runtime_error("Unable to retrieve process list");
+    } else {
+      *processes = (kinfo_proc *)memory;
+      processesLength = size / sizeof(kinfo_proc);
+      break;
+    }
+  }
+
+  kinfo_proc *processListBegin = processes[0];
+  kinfo_proc *processIter = processListBegin;
+  char exePath[PATH_MAX];
+  auto otherIsRunning = false;
+  for (size_t i = 0; i < processesLength; ++i) {
+    const auto pid = processIter->kp_proc.p_pid;
+    if (proc_pidpath(pid, exePath, PATH_MAX) <= 0) {
+      // assume process is dead...
+      continue;
     }
-    Py_DECREF(item);
+    if (isOtherInstance(pid,
+                        QFileInfo(QString::fromAscii(exePath)).fileName())) {
+      otherIsRunning = true;
+      break;
+    }
+    processIter++;
   }
-  return otherPIDs;
+  free(processListBegin);
+
+  return otherIsRunning;
 }
+#endif
 
 } // namespace Process
diff --git a/MantidPlot/src/Process.h b/MantidPlot/src/Process.h
index e7a467a8cd4258741bfc8a94ea1480e0f66ef530..a34de63c26a7d809ca9b9efb4359f3150210f088 100644
--- a/MantidPlot/src/Process.h
+++ b/MantidPlot/src/Process.h
@@ -22,18 +22,13 @@ along with this program.  If not, see <http://www.gnu.org/licenses/>.
 File change history is stored at: <https://github.com/mantidproject/mantid>
 Code Documentation is available at: <http://doxygen.mantidproject.org>
 */
-#include <cstdint>
-#include <vector>
 
 /*
- * A minimal wrapper around Python's psutil package to gather information
- * about processes
+ * Access information about the process and others
  */
 
 namespace Process {
 
 bool isAnotherInstanceRunning();
-
-std::vector<int64_t> otherInstancePIDs();
 }
 #endif // PROCESS_H_
diff --git a/Testing/SystemTests/tests/analysis/ISISIndirectInelastic.py b/Testing/SystemTests/tests/analysis/ISISIndirectInelastic.py
index e7b828dfe5835d6ac248f162103e6fd9e19be490..eafaa00637e80403882ca22a1766d0c535119ab4 100644
--- a/Testing/SystemTests/tests/analysis/ISISIndirectInelastic.py
+++ b/Testing/SystemTests/tests/analysis/ISISIndirectInelastic.py
@@ -849,10 +849,12 @@ class IRISIqtAndIqtFit(ISISIndirectInelasticIqtAndIqtFit):
     def get_reference_files(self):
         self.tolerance = 1e-3
         ref_files = ['II.IRISFury.nxs']
-        if platform.system() == "Darwin" or platform.linux_distribution()[0] == "Ubuntu":
-            ref_files += ['II.IRISFuryFitSeq.nxs']
+        # gsl v2 gives a slightly different result than v1
+        # we could do with a better check than this
+        if platform.linux_distribution()[0] == "Ubuntu":
+            ref_files += ['II.IRISFuryFitSeq_gslv2.nxs']
         else:
-            ref_files += ['II.IRISFuryFitSeq_win.nxs']
+            ref_files += ['II.IRISFuryFitSeq_gslv1.nxs']
         return ref_files
 
 #==============================================================================
diff --git a/Testing/SystemTests/tests/analysis/reference/II.IRISFuryFitSeq_win.nxs.md5 b/Testing/SystemTests/tests/analysis/reference/II.IRISFuryFitSeq_gslv1.nxs.md5
similarity index 100%
rename from Testing/SystemTests/tests/analysis/reference/II.IRISFuryFitSeq_win.nxs.md5
rename to Testing/SystemTests/tests/analysis/reference/II.IRISFuryFitSeq_gslv1.nxs.md5
diff --git a/Testing/SystemTests/tests/analysis/reference/II.IRISFuryFitSeq.nxs.md5 b/Testing/SystemTests/tests/analysis/reference/II.IRISFuryFitSeq_gslv2.nxs.md5
similarity index 100%
rename from Testing/SystemTests/tests/analysis/reference/II.IRISFuryFitSeq.nxs.md5
rename to Testing/SystemTests/tests/analysis/reference/II.IRISFuryFitSeq_gslv2.nxs.md5
diff --git a/buildconfig/CMake/CPackLinuxSetup.cmake b/buildconfig/CMake/CPackLinuxSetup.cmake
index f35b586ec25e460559b5feece5fc45979296a8ec..6c4b2e35bea9ce058c7ffc93a77d143499cfe586 100644
--- a/buildconfig/CMake/CPackLinuxSetup.cmake
+++ b/buildconfig/CMake/CPackLinuxSetup.cmake
@@ -37,34 +37,19 @@ if ( "${UNIX_DIST}" MATCHES "RedHatEnterprise" OR "${UNIX_DIST}" MATCHES "Fedora
   find_program ( RPMBUILD_CMD rpmbuild )
   if ( RPMBUILD_CMD )
     set ( CPACK_GENERATOR "RPM" )
+
     set ( CPACK_RPM_PACKAGE_ARCHITECTURE "${CMAKE_SYSTEM_PROCESSOR}" )
     set ( CPACK_RPM_PACKAGE_URL "http://www.mantidproject.org" )
+    set ( CPACK_RPM_PACKAGE_LICENSE "GPLv3" )
     set ( CPACK_RPM_COMPRESSION_TYPE "xz" )
 
-    # determine the distribution number
-    if(NOT CPACK_RPM_DIST)
-      execute_process(COMMAND ${RPMBUILD_CMD} -E %{?dist}
-                      OUTPUT_VARIABLE CPACK_RPM_DIST
-                      ERROR_QUIET
-                      OUTPUT_STRIP_TRAILING_WHITESPACE)
-    endif()
-
-    # release number defaults to 1
-    if(NOT CPACK_RPM_PACKAGE_RELEASE_NUMBER)
-      set(CPACK_RPM_PACKAGE_RELEASE_NUMBER "1")
+    set (CPACK_RPM_PACKAGE_RELEASE 1 CACHE STRING "The release number")
+    if ( CMAKE_VERSION VERSION_GREATER "3.5" )
+      # cmake can set the distribution flag correctly
+      set (CPACK_RPM_PACKAGE_RELEASE_DIST "on")
+    else ()
+      message (FATAL_ERROR "Will not create packages on cmake <3.6")
     endif()
-
-    # reset the release name
-    set( CPACK_RPM_PACKAGE_RELEASE "${CPACK_RPM_PACKAGE_RELEASE_NUMBER}${CPACK_RPM_DIST}" )
-
-    # If CPACK_SET_DESTDIR is ON then the Prefix doesn't get put in the spec file
-    if( CPACK_SET_DESTDIR )
-      message ( STATUS "Adding \"Prefix:\" line to spec file manually when CPACK_SET_DESTDIR is set")
-      set( CPACK_RPM_SPEC_MORE_DEFINE "Prefix: ${CPACK_PACKAGING_INSTALL_PREFIX}" )
-    endif()
-
-    # according to rpm.org: name-version-release.architecture.rpm
-    set ( CPACK_PACKAGE_FILE_NAME
-      "${CPACK_PACKAGE_NAME}-${CPACK_PACKAGE_VERSION}-${CPACK_RPM_PACKAGE_RELEASE}.${CPACK_RPM_PACKAGE_ARCHITECTURE}" )
-  endif ( RPMBUILD_CMD)
-endif ()
+    set (CPACK_RPM_FILE_NAME RPM-DEFAULT)
+  endif()
+endif()
diff --git a/buildconfig/CMake/GNUSetup.cmake b/buildconfig/CMake/GNUSetup.cmake
index 05525e7c84173dbb11d0d9173b2f1447064da861..b9e36a1835f64ef47f0a2b18cc372d8aae06c549 100644
--- a/buildconfig/CMake/GNUSetup.cmake
+++ b/buildconfig/CMake/GNUSetup.cmake
@@ -47,6 +47,13 @@ add_compile_options ( $<$<COMPILE_LANGUAGE:CXX>:-Woverloaded-virtual>
   $<$<COMPILE_LANGUAGE:CXX>:-fno-operator-names>
 )
 
+# Linking fails on Ubuntu 18.04 with --enable-new-dtags, so fall back to the old dtags behaviour
+if ( ${CMAKE_SYSTEM_NAME} STREQUAL "Linux" )
+  string(APPEND CMAKE_MODULE_LINKER_FLAGS " -Wl,--disable-new-dtags" )
+  string(APPEND CMAKE_EXE_LINKER_FLAGS " -Wl,--disable-new-dtags" )
+  string(APPEND CMAKE_SHARED_LINKER_FLAGS " -Wl,--disable-new-dtags" )
+endif ()
+
 # Check if we have a new enough version for these flags
 if ( CMAKE_COMPILER_IS_GNUCXX )
   add_compile_options ( -Wpedantic )
diff --git a/buildconfig/CMake/VersionNumber.cmake b/buildconfig/CMake/VersionNumber.cmake
index 4c0f5128e8b939456c1cfba33f38779ebbdc5887..ad429218ae4cc034c41c629a38778453c12f338f 100644
--- a/buildconfig/CMake/VersionNumber.cmake
+++ b/buildconfig/CMake/VersionNumber.cmake
@@ -1,7 +1,7 @@
 # Set the version number here for MantidVersion and the package filenames
 
 set ( VERSION_MAJOR 3 )
-set ( VERSION_MINOR 13 )
+set ( VERSION_MINOR 12 )
 
 # UNCOMMENT the next 'set' line to 'force' the patch version number to
 # a value (instead of using the count coming out of 'git describe')
diff --git a/buildconfig/Jenkins/buildscript b/buildconfig/Jenkins/buildscript
index 8fa2a4c145719e21ed9ca1297afe0aab5b071e86..d4fb2f75f0be02f48d78c4bf10f9bb513df44266 100755
--- a/buildconfig/Jenkins/buildscript
+++ b/buildconfig/Jenkins/buildscript
@@ -233,9 +233,17 @@ if [[ ${DO_BUILD_PKG} == true ]]; then
       # everything else uses lower-case values
       PACKAGINGVARS="${PACKAGINGVARS} -DCMAKE_INSTALL_PREFIX=/opt/mantid${PACKAGE_SUFFIX} -DCPACK_PACKAGE_SUFFIX=${PACKAGE_SUFFIX}"
     fi
+
+    if [[ ${ON_RHEL7} == true ]]; then
+      if [[ -n "${RELEASE_NUMBER}" ]]; then
+        RELEASE_NUMBER="1"
+      fi
+      PACKAGINGVARS="${PACKAGINGVARS} -DCPACK_RPM_PACKAGE_RELEASE=${RELEASE_NUMBER}"
+    fi
   fi
 fi
 
+
 ###############################################################################
 # Generator
 ###############################################################################
diff --git a/docs/source/algorithms/CalculateCarpenterSampleCorrection-v1.rst b/docs/source/algorithms/CalculateCarpenterSampleCorrection-v1.rst
index 61ad1d82c9b4d7a83d1a35e5e1c50660f15ba250..e4a8ef5f0914db023dd5be721ecf5e6209e5c3ab 100644
--- a/docs/source/algorithms/CalculateCarpenterSampleCorrection-v1.rst
+++ b/docs/source/algorithms/CalculateCarpenterSampleCorrection-v1.rst
@@ -17,7 +17,7 @@ Mikkelson.
 
 * Elastic scattering is assumed
 
-In [1] we see that the calculation of the attenuation factor F involves 
+In [1]_ we see that the calculation of the attenuation factor F involves 
 an integral over the sample cylinder. By expanding the integrands as a power series, 
 we can factor out any dependence on scattering cross section and radius. 
 These integral terms are denoted by :math:`Z_{mn}` and so we may write:
@@ -36,7 +36,7 @@ expansion coefficients:
 where the Chebyshev coefficients :math:`c_{s}(m,n)` up to  m + n 
 :math:`\leqslant` 5 have been tabulated and are stored as an array by the algorithm.
 
-This version of the correction follows the implemenation in [1] in that it only calculates for the correction in-plane, unlike [2] that generalizes the correction to out-of-plane.
+This version of the correction follows the implementation in [1]_ in that it only calculates the in-plane correction, unlike [2]_ and [3]_, which generalize the correction to out-of-plane.
 
 This algorithm calculates and outputs the absorption and/or multiple scattering correction workspaces to be applied to the InputWorkspace. Thus, there are, at most, two workspaces in the OutputWorkspaceBaseName group workspace. This allows for flexibility of applying either correction to a workspace without having to apply both (as is the case with :ref:`algm-CarpenterSampleCorrection`). For the case where both corrections are calculated, the output will be the following:
 
diff --git a/docs/source/algorithms/CarpenterSampleCorrection-v1.rst b/docs/source/algorithms/CarpenterSampleCorrection-v1.rst
index 6ad17d7d325e7d457d98e04991b9af97e35aebdc..aecb33d5be161ee1c10a66838a9161bb990f9376 100644
--- a/docs/source/algorithms/CarpenterSampleCorrection-v1.rst
+++ b/docs/source/algorithms/CarpenterSampleCorrection-v1.rst
@@ -17,7 +17,7 @@ Mikkelson.
 
 * Elastic scattering is assumed
 
-In [1]__ we see that the calculation of the attenuation factor F involves 
+In [1]_ we see that the calculation of the attenuation factor F involves 
 an integral over the sample cylinder. By expanding the integrands as a power series, 
 we can factor out any dependence on scattering cross section and radius. 
 These integral terms are denoted by :math:`Z_{mn}` and so we may write:
@@ -36,7 +36,7 @@ expansion coefficients:
 where the Chebyshev coefficients :math:`c_{s}(m,n)` up to  m + n 
 :math:`\leqslant` 5 have been tabulated and are stored as an array by the algorithm.
 
-This version of the correction follows the implemenation in [1] in that it only calculates for the correction in-plane, unlike [2] that generalizes the correction to out-of-plane.
+This version of the correction follows the implementation in [1]_ in that it only calculates the in-plane correction, unlike [2]_ and [3]_, which generalize the correction to out-of-plane.
 
 This algorithm calls :ref:`algm-CalculateCarpenterSampleCorrection` to calculate both absorption and multiple scattering corrections and then applies both to the sample workspace.
 
diff --git a/docs/source/algorithms/EditInstrumentGeometry-v1.rst b/docs/source/algorithms/EditInstrumentGeometry-v1.rst
index 8773eb259d3ce5c66308b97d90a8dde4d7848d50..2ef6b3fb137bb7532b4afc1fedaf76ddcdb0c95a 100644
--- a/docs/source/algorithms/EditInstrumentGeometry-v1.rst
+++ b/docs/source/algorithms/EditInstrumentGeometry-v1.rst
@@ -13,7 +13,7 @@ This algorithm can
 
 | ``1. add an Instrument to a Workspace without any real instrument associated with, or``
 | ``2. replace a Workspace's Instrument with a new Instrument, or``
-| ``3. edit all detectors' parameters of the instrument associated with a Workspace (partial instrument editing is not supported). ``
+| ``3. edit all detectors' parameters of the instrument associated with a Workspace (partial instrument editing is not supported).``
 
 Requirements on input properties
 --------------------------------
diff --git a/docs/source/algorithms/LeBailFit-v1.rst b/docs/source/algorithms/LeBailFit-v1.rst
index 3cdafc2c9414113a285dc76b6e1e8566fb6074fa..5853c189614b580a70226573a808de6e29091064 100644
--- a/docs/source/algorithms/LeBailFit-v1.rst
+++ b/docs/source/algorithms/LeBailFit-v1.rst
@@ -53,9 +53,9 @@ library might be supported.
 Supported functionalities
 #########################
 
-| ``* LeBailFit: fit profile parameters by Le bail algorithm; ``
-| ``* Calculation: pattern calculation by Le bail algorithm; ``
-| ``* MonteCarlo: fit profile parameters by Le bail algorithm with Monte Carlo random wal; ``
+| ``* LeBailFit: fit profile parameters by Le bail algorithm;``
+| ``* Calculation: pattern calculation by Le bail algorithm;``
+| ``* MonteCarlo: fit profile parameters by Le bail algorithm with Monte Carlo random walk;``
 | ``* RefineBackground: refine background parameters``
 
 Further Information
diff --git a/docs/source/algorithms/MultipleScatteringCylinderAbsorption-v1.rst b/docs/source/algorithms/MultipleScatteringCylinderAbsorption-v1.rst
deleted file mode 100644
index bcb4b1ce649ed5156a2e90269f241aec0551b587..0000000000000000000000000000000000000000
--- a/docs/source/algorithms/MultipleScatteringCylinderAbsorption-v1.rst
+++ /dev/null
@@ -1,77 +0,0 @@
-.. algorithm::
-
-.. summary::
-
-.. relatedalgorithms::
-
-.. properties::
-
-Description
------------
-This algorithm is a port to C++ of a multiple scattering absorption
-correction, used to correct the vanadium spectrum for the GPPD
-instrument at the IPNS. The correction calculation was originally worked
-out by Jack Carpenter and Asfia Huq and implemented in Java by Alok
-Chatterjee. The java code was translated to C++ in Mantid by Dennis
-Mikkelson.
-
-* Elastic scattering is assumed
-
-In [1]__ we see that the calculation of the attenuation factor F involves 
-an integral over the sample cylinder. By expanding the integrands as a power series, 
-we can factor out any dependence on scattering cross section and radius. 
-These integral terms are denoted by :math:`Z_{mn}` and so we may write:
-
-.. math::
-   \frac{1}{F} = \sum_{m=0}^\infty\sum_{n=0}^\infty\frac{(-1)^{m+n}}{m!n!}(\mu R)^{m+n} Z_{mn}(\theta)
-
-where :math:`\mu` is the inverse scattering length.
-
-The functions :math:`Z_{mn}(\theta)` are written in terms of Chebyshev 
-expansion coefficients:
-
-.. math::
-  Z_{mn}(\theta) = \sum_{s=0}^\infty c_{s}(m,n)cos(s\theta)
-  
-where the Chebyshev coefficients :math:`c_{s}(m,n)` up to  m + n 
-:math:`\leqslant` 5 have been tabulated and are stored as an array by the algorithm.
-
-Usage
------
-
-**Example: A simple cylindrical sample**
-
-.. testcode:: ExMultipleScatteringCylinderAbsorption
-
-    ws = CreateSampleWorkspace("Histogram",NumBanks=1,BankPixelWidth=1)
-    ws = ConvertUnits(ws,"Wavelength")
-    ws = Rebin(ws,Params=[1])
-    SetSampleMaterial(ws,ChemicalFormula="V")
-
-    #restrict the number of wavelength points to speed up the example
-    wsOut = MultipleScatteringCylinderAbsorption(ws,CylinderSampleRadius=0.2)
-
-    print("Output:  {}".format(wsOut.readY(0)))
-
-Output:
-
-.. testoutput:: ExMultipleScatteringCylinderAbsorption
-
-    Output:  [  6.1210107    6.57502041  19.47638255   7.58160094   8.13860778
-       2.33885171]
-
-
-References
-----------
-
-.. [1] J.M. Carpenter *Attenuation Correction Factor for Scattering from Cylindrical Targets* Review of Scientific Instruments **40.4** (1969): 555. doi: `10.1063/1.1684003 <http://dx.doi.org/10.1063/1.1684003>`_
-
-.. [2] D.F.R. Mildner, J.M. Carpenter, and C.A. Pelizzari *Generalized Attenuation Correction Factor for Scattering from Cylindrical Targets* Review of Scientific Instruments **45.4** (1974): 572. doi: `10.1063/1.1686687 <http://dx.doi.org/10.1063/1.1686687>`_
-
-.. [3] D.F.R. Mildner and J.M.Carpenter *Improvements to the Chebyshev Expansion of Attenuation Correction Factors for Cylindrical Samples.* J Appl Crystallogr **23.5** (1990): 378–386 doi: `10.1107/S0021889890005258 <http://dx.doi.org/10.1107/S0021889890005258>`_
-
-.. seealso :: Algorithm :ref:`algm-MayersSampleCorrection`
-
-.. categories::
-
-.. sourcelink::
diff --git a/docs/source/algorithms/RefinePowderDiffProfileSeq-v1.rst b/docs/source/algorithms/RefinePowderDiffProfileSeq-v1.rst
index a296eb204bac65b65d86b24b2ce3f3244712cb91..d609bcb0a1541ed03364683878f6b10924931e76 100644
--- a/docs/source/algorithms/RefinePowderDiffProfileSeq-v1.rst
+++ b/docs/source/algorithms/RefinePowderDiffProfileSeq-v1.rst
@@ -20,58 +20,89 @@ subset of profile parameters are refined.
 In order to control the workflow, there are four major functions
 supported by this algorithm
 
-| ``* ``\ *``Setup``*\ `` : set up a few workspaces that will be used to refine profile parameters in multiple steps;``
-| ``* ``\ *``Refine``*\ `` : select a subset of peak parameters and do Le Bail fit on them;``
-| ``* ``\ *``Save``*\ `` : save the current refinement status and refinement history to a project file;``
-| ``* ``\ *``Load``*\ `` : set up a few workspaces used for refining by loading them from a previously created project file. ``
+- **Setup**: set up a few workspaces that will be used to refine profile parameters in multiple steps;
+
+- **Refine**: select a subset of peak parameters and do a Le Bail fit on them;
+
+- **Save**: save the current refinement status and refinement history to a project file;
+
+- **Load**: set up a few workspaces used for refining by loading them from a previously created project file.
+
+
 
 Input and output workspaces
 ###########################
 
-| ``* InputWorkspace : data workspace containing the diffraction pattern to refine profile parameters with;``
-| ``* SeqControlInfoWorkspace : table workspace used to track refinement; Below is the introduction on the fields/columns of this workspace. ``
-| ``    *  "Step" : refinement step.  User can start a refinement from the result of any previous ``\ **``Step``**\ ``; ``
-| ``    *  "OutProfile" : name of the table workspace containing refined profile parameters;``
-| ``    *  "OutReflection": name of the table workspace containing Bragg peaks' peak parameters calculated from refined parameters' value; ``
-| ``    *  "OutBackgroud": name of the table workspace containing the output background parameters' value; ``
-| ``    *  "OutBckgroundParam": name of the output background parameters;``
-| ``    *  "Refine": profile parameters that are refined in this step;``
-| ``    *  "RwpOut": output Rwp from refinement; ``
-| ``    *  "LastStep": last step where this step is based on; ``
-| ``    *  "RwpIn": input Rwp``
-| ``    *  "InProfile": input profile parameter workspace's name;``
-| ``    *  "InReflection": input Bragg peak parameters workspace' name;``
-| ``    *  "InBackgroud": input background workspace; ``
-| ``    *  "InBckgroundParam": input background parameters. ``
-| ``* InputProfileWorkspace : table workspace contraining starting values of profile parameters;``
-| ``* InputBraggPeaksWorkspace : table workspace containing the Bragg peaks' information for Le Bail fit;``
-| ``* InputBackgroundParameterWorkspace : table workspace containing the background parameters' value``
+
+
+- InputWorkspace : data workspace containing the diffraction pattern to refine profile parameters with;
+
+- SeqControlInfoWorkspace : table workspace used to track the refinement. The fields/columns of this workspace are described below.
+
+- *Step* : refinement step. The user can start a refinement from the result of any previous step;
+  
+- *OutProfile* : name of the table workspace containing refined profile parameters;
+  
+- *OutReflection* : name of the table workspace containing Bragg peak parameters calculated from the refined parameters' values;
+  
+- *OutBackgroud* : name of the table workspace containing the value of the output background parameter;
+  
+- *OutBckgroundParam* : name of the output background parameters;
+  
+- *Refine* : profile parameters that are refined in this step;
+
+- *RwpOut* : output Rwp from refinement;
+
+- *LastStep* : the previous step on which this step is based;
+
+- *RwpIn* : input Rwp;
+
+- *InProfile* : input profile parameter workspace's name;
+
+- *InReflection* : input Bragg peak parameters workspace's name;
+
+- *InBackgroud* : input background workspace;
+
+- *InBckgroundParam* : input background parameters.
+
+
+- InputProfileWorkspace : table workspace containing starting values of profile parameters;
+
+- InputBraggPeaksWorkspace : table workspace containing the Bragg peaks' information for Le Bail fit;
+
+- InputBackgroundParameterWorkspace : table workspace containing the background parameters' values.
+
+
 
 Supported peak profiles
 #######################
 
-| ``* Neutron Back-to-back exponential convoluted with pseudo-voigt : Fullprof profile 9 and GSAS TOF profile 3;``
-| ``* Thermal neutron Back-to-back exponential convoluted with pseudo-voigt: Fullprof profile 10 (a.k.a. Jason Hodges function). ``
+- Neutron Back-to-back exponential convoluted with pseudo-Voigt: Fullprof profile 9 and GSAS TOF profile 3;
+
+- Thermal neutron Back-to-back exponential convoluted with pseudo-Voigt: Fullprof profile 10 (a.k.a. Jason Hodges function).
+
 
 Supported background types
 ##########################
 
-| ``* Polynomial``
-| ``* Chebyshev``
-| ``* FullprofPolynomial``
+- Polynomial
+
+- Chebyshev
+
+- FullprofPolynomial
 
 Hint for using
 --------------
 
 This is just a brief description for how to use this algorithm.
 
-| ``1. ``\ *``Setup``*\ ``;``
-| ``2. ``\ *``Refine``*\ ``: refine ``\ *``Dtt1``*\ `` and ``\ *``Zero``*\ `` from step 0;``
-| ``3. ``\ *``Refine``*\ ``: reifne ``\ *``Alph0``*\ `` and ``\ *``Beta0``*\ `` from step 1;``
-| ``4. ``\ *``Refine``*\ ``: refine ``\ *``Alph1``*\ `` from step 1 with failure;``
-| ``5. ``\ *``Refine``*\ ``: refine ``\ *``Beta1``*\ `` from step 1 because step 2 fails; ``
-| ``6. ``\ *``Refine``*\ ``: refine ``\ *``Sig-1``*\ `` from last step;``
-| ``7. ``\ *``Save``*\ ``: save current work and history to a Nexus file.``
+1. *Setup*;
+2. *Refine*: refine *Dtt1* and *Zero* from step 0;
+3. *Refine*: refine *Alph0* and *Beta0* from step 1;
+4. *Refine*: refine *Alph1* from step 1, but this refinement fails;
+5. *Refine*: refine *Beta1* from step 1 because step 2 fails;
+6. *Refine*: refine *Sig-1* from the last step;
+7. *Save*: save the current work and history to a Nexus file.
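+
+A minimal sketch of this Setup/Refine/Save sequence from Python might look like the following (the workspace properties are those listed above; ``FunctionOption``, ``ParametersToRefine`` and ``OutputProjectFilename`` are assumed, illustrative property names):
+
+.. code-block:: python
+
+    from mantid.simpleapi import RefinePowderDiffProfileSeq
+
+    # Setup: create the workspaces used throughout the sequential refinement
+    # ("FunctionOption" is an assumed name for the mode selector)
+    RefinePowderDiffProfileSeq(InputWorkspace="data_ws",
+                               SeqControlInfoWorkspace="seq_control",
+                               InputProfileWorkspace="profile_params",
+                               InputBraggPeaksWorkspace="bragg_peaks",
+                               InputBackgroundParameterWorkspace="bkgd_params",
+                               FunctionOption="Setup")
+
+    # Refine: fit a small subset of profile parameters, starting from step 0
+    # ("ParametersToRefine" is an assumed name)
+    RefinePowderDiffProfileSeq(InputWorkspace="data_ws",
+                               SeqControlInfoWorkspace="seq_control",
+                               FunctionOption="Refine",
+                               ParametersToRefine="Dtt1, Zero")
+
+    # Save: write the current refinement status and history to a project file
+    # ("OutputProjectFilename" is an assumed name)
+    RefinePowderDiffProfileSeq(SeqControlInfoWorkspace="seq_control",
+                               FunctionOption="Save",
+                               OutputProjectFilename="refine_project.nxs")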
 
 .. categories::
 
diff --git a/docs/source/algorithms/SANSWideAngleCorrection-v1.rst b/docs/source/algorithms/SANSWideAngleCorrection-v1.rst
index f2fd5aa11cdad04d2235637a7298f26b2519b5e0..be9649e32b884d09b31c66e4e49c8cd7431df919 100644
--- a/docs/source/algorithms/SANSWideAngleCorrection-v1.rst
+++ b/docs/source/algorithms/SANSWideAngleCorrection-v1.rst
@@ -60,7 +60,7 @@ Error Propagation
 
 The error propagation follows this formula:
 
-`` ``\ :math:`OutputWorkspace_{error} = \frac{T_{0E} ^A - 1}{A\ln(T_0E)}`
+:math:`OutputWorkspace_{error} = \frac{T_{0E}^A - 1}{A\ln(T_{0E})}`
 
 Which means, that we do not consider the error in the definition of the
 :math:`2\theta` (the parameter A)
diff --git a/docs/source/api/python/mantid/plots/index.rst b/docs/source/api/python/mantid/plots/index.rst
index 18b9eaefd58b2133d7b51a649a0fde516b8641ad..58b4b7d33a0d555cb905b49d718b914a7461479a 100644
--- a/docs/source/api/python/mantid/plots/index.rst
+++ b/docs/source/api/python/mantid/plots/index.rst
@@ -258,4 +258,11 @@ Functions to use when **mantid3d** projection is not available
 Helper functions
 ----------------
 .. automodule:: mantid.plots.helperfunctions
+   :members: get_distribution, get_normalization,
+             points_from_boundaries, boundaries_from_points,
+             get_wksp_index_dist_and_label, get_md_data, get_md_data1d,
+             get_md_data2d_bin_bounds, get_md_data2d_bin_centers,
+             get_spectrum, get_matrix_2d_data, get_uneven_data,
+             get_sample_log, get_axes_labels
+
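+
+As an illustrative sketch, these helpers are normally exercised indirectly when a workspace is plotted through the ``mantid`` matplotlib projection (the ``specNum`` keyword is assumed here):
+
+.. code-block:: python
+
+    import matplotlib.pyplot as plt
+    import mantid.plots  # registers the 'mantid' projection with matplotlib
+    from mantid.simpleapi import CreateSampleWorkspace
+
+    ws = CreateSampleWorkspace()
+    fig, ax = plt.subplots(subplot_kw={'projection': 'mantid'})
+    ax.plot(ws, specNum=1)  # helper functions extract the x/y data and labels
+    plt.show()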
 
diff --git a/docs/source/release/v3.13.0/framework.rst b/docs/source/release/v3.13.0/framework.rst
index dc5e9719f56eeb2e1839f63ed112ae9b71cd343c..8cccf84295899d22a1f727f3f2c24d08f889825a 100644
--- a/docs/source/release/v3.13.0/framework.rst
+++ b/docs/source/release/v3.13.0/framework.rst
@@ -18,23 +18,23 @@ Stability
 
 - We have introduced a Project Recovery mechanism for Mantidplot in order to be able to recover the lost state of the interface in the event of a crash or unexpected shutdown.  There are more details in the UI section of the release notes.
 - The error reporter can now catches hard crashes to desktop, allowing us to get more information on causes of hangs or crashes in Mantid.  Since the last release error reports sent to us led directly to the identification of and fixes for 3 seperate bugs in Mantid.
-- Mantid now handles poor network stability  better when reading live data from the ISIS DAE.  Mantid will now timeout after a couple of minutes of loss of network connectivity and remains responsive during this time.  You can alter the duration of this timeout by adding a line to the mantid.user.properties file like:
-
-```
-ISISDAE.Timeout = 100 #seconds
-```
+- Mantid now handles poor network stability better when reading live data from the ISIS DAE. Mantid will now time out after a couple of minutes of loss of network connectivity and remain responsive during this time.
+  You can alter the duration of this timeout by adding a line to the mantid.user.properties file like::
+
+    ISISDAE.Timeout = 100 #seconds
 
 
 Algorithms
 ----------
 
-New
-###
+New feature
+###########
 
 - Sometimes the algorithm you are looking at is close to what you want, but not quite, to help you find the right one a list of related algorithms has been added to each algorithm, and is displayed in the documentation page of each algorithm as part of it's summary.
 
 New Algorithms
-**************
+##############
 
 - :ref:`LoadSampleShape <algm-LoadSampleShape>` loads a shape into the sample in a workspace from an
   ASCII `STL <https://en.wikipedia.org/wiki/STL_(file_format)>`_  file,
diff --git a/docs/source/release/v3.13.0/index.rst b/docs/source/release/v3.13.0/index.rst
index d2ce930b2e3c1f95203fc5c08030da0c250ded67..78374be03a9e753730447772608e7708ca723379 100644
--- a/docs/source/release/v3.13.0/index.rst
+++ b/docs/source/release/v3.13.0/index.rst
@@ -28,7 +28,7 @@ time and effort helping us to make this another reliable version of Mantid.
 Citation
 --------
 
-Please cite any usage of Mantid as follows: **TODO update with current version doi**
+Please cite any usage of Mantid as follows:
 
 - *Mantid 3.13.0: Manipulation and Analysis Toolkit for Instrument Data.; Mantid Project*. `doi: 10.5286/SOFTWARE/MANTID3.13.0 <http://dx.doi.org/10.5286/SOFTWARE/MANTID3.13.0>`_
 
diff --git a/docs/source/release/v3.13.0/muon.rst b/docs/source/release/v3.13.0/muon.rst
index bc6d353fabe403235e5f01fee2d45c51efb3c9b5..a6ab959b68b8e4ab3a122dc4c57332aa15b555de 100644
--- a/docs/source/release/v3.13.0/muon.rst
+++ b/docs/source/release/v3.13.0/muon.rst
@@ -10,7 +10,7 @@ Interface
 
 Improvements
 ############
-- The updated :ref:`EstimateMuonAsymmetryFromCounts <algm-EstimateMuonAsymmetryFromCounts>` is used in Muon Analysis.
+- The updated :ref:`EstimateMuonAsymmetryFromCounts <algm-EstimateMuonAsymmetryFromCounts>` and :ref:`CalculateMuonAsymmetry <algm-CalculateMuonAsymmetry>` are used in Muon Analysis.
 - TF Asymmetry mode now uses :ref:`CalMuonDetectorPhases <algm-CalMuonDetectorPhases>` and the fitting function is updated to show the normalization.
 
 Bugfixes
@@ -33,6 +33,7 @@ New
 Improvements
 ############
 - :ref:`EstimateMuonAsymmetryFromCounts <algm-EstimateMuonAsymmetryFromCounts>` now updates a normalization table and produces unnormalized data.
+- :ref:`CalculateMuonAsymmetry <algm-CalculateMuonAsymmetry>` now updates a normalization table and can take multiple workspaces.
 
 Bugfixes
 ########
diff --git a/qt/paraview_ext/VatesSimpleGui/QtWidgets/CMakeLists.txt b/qt/paraview_ext/VatesSimpleGui/QtWidgets/CMakeLists.txt
index a67121969ada40ca346a259ce484deffb4e4b62f..5a82770fc52197b9c643b4984b2dfbfa45695968 100644
--- a/qt/paraview_ext/VatesSimpleGui/QtWidgets/CMakeLists.txt
+++ b/qt/paraview_ext/VatesSimpleGui/QtWidgets/CMakeLists.txt
@@ -52,7 +52,7 @@ mtd_add_qt_library (TARGET_NAME VatesSimpleGuiQtWidgets
     @loader_path/../../Contents/Libraries
     @loader_path/../../Contents/MacOS
   LINUX_INSTALL_RPATH
-    "\$ORIGIN/../../${LIB_DIR}"
+    "\$ORIGIN/../../${LIB_DIR};\$ORIGIN/../../${LIB_DIR}/paraview-${ParaView_VERSION_MAJOR}.${ParaView_VERSION_MINOR}"
 )
 
 # Set the name of the generated library
diff --git a/qt/paraview_ext/VatesSimpleGui/ViewWidgets/CMakeLists.txt b/qt/paraview_ext/VatesSimpleGui/ViewWidgets/CMakeLists.txt
index 68d604c13eb610df9e280c8dffcf4bfd5cfc12ec..de0d7e2dc6a84c3c7593200dc0284a9e3735da2c 100644
--- a/qt/paraview_ext/VatesSimpleGui/ViewWidgets/CMakeLists.txt
+++ b/qt/paraview_ext/VatesSimpleGui/ViewWidgets/CMakeLists.txt
@@ -136,7 +136,7 @@ mtd_add_qt_library (TARGET_NAME VatesSimpleGuiViewWidgets
     @loader_path/../../Contents/MacOS
     @loader_path/../../Contents/Libraries
   LINUX_INSTALL_RPATH
-    "\$ORIGIN/../../${LIB_DIR};\$ORIGIN"
+    "\$ORIGIN/../../${LIB_DIR};\$ORIGIN/../../${LIB_DIR}/paraview-${ParaView_VERSION_MAJOR}.${ParaView_VERSION_MINOR};\$ORIGIN"
 )
 
 # Set the name of the generated library
diff --git a/qt/widgets/common/CMakeLists.txt b/qt/widgets/common/CMakeLists.txt
index 60a626ec6eb2c59f9bc0848f55f86af1b526f2ad..58323bb8d146747ce0b1eea469939e39b426d508 100644
--- a/qt/widgets/common/CMakeLists.txt
+++ b/qt/widgets/common/CMakeLists.txt
@@ -535,7 +535,7 @@ mtd_add_qt_library (TARGET_NAME MantidQtWidgetsCommon
     @loader_path/../MacOS
     @loader_path/../Libraries
   LINUX_INSTALL_RPATH
-    "\$ORIGIN/../../${LIB_DIR}"
+    "\$ORIGIN/../${LIB_DIR}"
 )
 
 ###########################################################################
diff --git a/qt/widgets/common/src/MuonFitPropertyBrowser.cpp b/qt/widgets/common/src/MuonFitPropertyBrowser.cpp
index 843913cbfeb86420439b34e48470347c05759a48..6646c9698f93e2d18c75e2d77fcc3e06e6724057 100644
--- a/qt/widgets/common/src/MuonFitPropertyBrowser.cpp
+++ b/qt/widgets/common/src/MuonFitPropertyBrowser.cpp
@@ -1185,13 +1185,20 @@ void MuonFitPropertyBrowser::ConvertFitFunctionForMuonTFAsymmetry(
     std::string mode = (enabled) ? "Construct" : "Extract";
     alg->setProperty("Mode", mode);
     alg->execute();
+    if (!alg->isExecuted()) {
+      return;
+    }
     IFunction_sptr func = alg->getProperty("OutputFunction");
 
     // multiple fit
     if (m_isMultiFittingMode) {
       // update values in browser
-      auto tmp = boost::dynamic_pointer_cast<MultiDomainFunction>(func);
-      old = tmp->getFunction(0);
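+      // only a genuine multi-domain output can be downcast; otherwise use the function directly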
+      if (func->getNumberDomains() > 1) {
+        auto tmp = boost::dynamic_pointer_cast<MultiDomainFunction>(func);
+        old = tmp->getFunction(0);
+      } else {
+        old = func;
+      }
       m_functionBrowser->setFunction(old);
       // preserve global parameters
       QStringList newGlobals;
@@ -1209,10 +1216,11 @@ void MuonFitPropertyBrowser::ConvertFitFunctionForMuonTFAsymmetry(
 
       m_functionBrowser->setGlobalParameters(newGlobals);
       // if multi data set we need to do the fixes manually
+      // fixes for the current domain are applied automatically
       auto originalNames = func->getParameterNames();
       for (auto name : originalNames) {
         auto index = func->parameterIndex(name);
-        if (func->isFixed(index)) {
+        if (func->isFixed(index) && func->getNumberDomains() > 1) {
           // get domain
           auto index = name.find_first_of(".");
           std::string domainStr = name.substr(1, index - 1);
@@ -1243,18 +1251,24 @@ void MuonFitPropertyBrowser::ConvertFitFunctionForMuonTFAsymmetry(
 * @param enabled :: [input] Whether to turn this mode on or off
 */
 void MuonFitPropertyBrowser::setTFAsymmMode(bool enabled) {
-  ConvertFitFunctionForMuonTFAsymmetry(enabled);
-
-  // Show or hide the TFAsymmetry fit
-  if (enabled) {
-    // m_settingsGroup->property()->addSubProperty(m_normalization);
-    // m_multiFitSettingsGroup->property()->addSubProperty(m_normalization);
-    m_settingsGroup->property()->addSubProperty(m_keepNorm);
-    // setNormalization();
-  } else {
-    // m_settingsGroup->property()->removeSubProperty(m_normalization);
-    // m_multiFitSettingsGroup->property()->removeSubProperty(m_normalization);
-    m_settingsGroup->property()->removeSubProperty(m_keepNorm);
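+  // only toggle TF asymmetry mode if a fitting function with parameters has been set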
+  IFunction_sptr old =
+      boost::dynamic_pointer_cast<IFunction>(m_compositeFunction);
+  if (old->nParams() > 0) {
+    ConvertFitFunctionForMuonTFAsymmetry(enabled);
+    // Show or hide the TFAsymmetry fit
+    if (enabled) {
+      m_settingsGroup->property()->addSubProperty(m_keepNorm);
+    } else {
+      m_settingsGroup->property()->removeSubProperty(m_keepNorm);
+    }
+  } else if (enabled) {
+    // will update when user clicks elsewhere
+    m_boolManager->setValue(m_TFAsymmMode, false);
+    QMessageBox::warning(this, "Muon Analysis",
+                         "No fitting function provided. TF Asymmetry mode "
+                         "requires a fitting function to be added before "
+                         "enabling. Please add a fitting function and enable "
+                         "TF Asymmetry Mode again.");
   }
 }
 std::string MuonFitPropertyBrowser::TFExtension() const {
diff --git a/qt/widgets/common/src/WorkspacePresenter/WorkspaceTreeWidget.cpp b/qt/widgets/common/src/WorkspacePresenter/WorkspaceTreeWidget.cpp
index 9e08a44e462f97b068dab68b1b3c437ea8753aa5..7e0bb479cb329f963ec5adc936053b5c0030fa9d 100644
--- a/qt/widgets/common/src/WorkspacePresenter/WorkspaceTreeWidget.cpp
+++ b/qt/widgets/common/src/WorkspacePresenter/WorkspaceTreeWidget.cpp
@@ -1600,9 +1600,14 @@ void WorkspaceTreeWidget::onClickShowDetectorTable() {
 
 void WorkspaceTreeWidget::showDetectorsTable() {
   // get selected workspace
-  auto ws = getSelectedWorkspaceNames()[0];
-  m_mantidDisplayModel->createDetectorTable(QString::fromStdString(ws),
-                                            std::vector<int>(), false);
+  auto ws = QString::fromStdString(getSelectedWorkspaceNames()[0]);
+  auto table =
+      m_mantidDisplayModel->createDetectorTable(ws, std::vector<int>(), false);
+  if (!table) {
+    QMessageBox::information(
+        this, "Error",
+        QString("Cannot create detectors tables for workspace ") + ws);
+  }
 }
 
 void WorkspaceTreeWidget::onClickShowBoxData() {
diff --git a/qt/widgets/factory/CMakeLists.txt b/qt/widgets/factory/CMakeLists.txt
index c450d7876fc0d67fee22eb157468bbc060149381..1f6dd12c4dd5f92a61183efd198fe9f8f009c656 100644
--- a/qt/widgets/factory/CMakeLists.txt
+++ b/qt/widgets/factory/CMakeLists.txt
@@ -37,6 +37,7 @@ mtd_add_qt_library (TARGET_NAME MantidQtWidgetsFactory
     MantidQtWidgetsCommon
     MantidQtWidgetsLegacyQwt
     MantidQtWidgetsSliceViewer
+  LINUX_INSTALL_RPATH
+    "\$ORIGIN/../${LIB_DIR}"
 )
 
-
diff --git a/qt/widgets/legacyqwt/CMakeLists.txt b/qt/widgets/legacyqwt/CMakeLists.txt
index ad33de02b3aa1c7794eeb637e0e626078f41b7f7..004a0271725b9273dc9f2c10eb9ff12b3d7d49fa 100644
--- a/qt/widgets/legacyqwt/CMakeLists.txt
+++ b/qt/widgets/legacyqwt/CMakeLists.txt
@@ -80,7 +80,7 @@ mtd_add_qt_library (TARGET_NAME MantidQtWidgetsLegacyQwt
   OSX_INSTALL_RPATH
     @loader_path/../MacOS
   LINUX_INSTALL_RPATH
-    "\$ORIGIN/../../${LIB_DIR}"
+    "\$ORIGIN/../${LIB_DIR}"
 )
 
 ###########################################################################
diff --git a/qt/widgets/sliceviewer/CMakeLists.txt b/qt/widgets/sliceviewer/CMakeLists.txt
index 6343a7bb3aff460fb61de6aac535699e1d5749e5..3473c45eddebfbe39d90627bd6f1b78a7ed6fd7f 100644
--- a/qt/widgets/sliceviewer/CMakeLists.txt
+++ b/qt/widgets/sliceviewer/CMakeLists.txt
@@ -141,7 +141,7 @@ mtd_add_qt_library (TARGET_NAME MantidQtWidgetsSliceViewer
   OSX_INSTALL_RPATH
     loader_path/../MacOS
   LINUX_INSTALL_RPATH
-    "\$ORIGIN/../../${LIB_DIR}"
+    "\$ORIGIN/../${LIB_DIR}"
 )
 
 ###########################################################################
diff --git a/qt/widgets/sliceviewer/src/ConcretePeaksPresenter.cpp b/qt/widgets/sliceviewer/src/ConcretePeaksPresenter.cpp
index 6f4960032b22b015f110e9c8c03412645d81c76e..f5163ebe3969c1d1ee5a1c38507201af56233b03 100644
--- a/qt/widgets/sliceviewer/src/ConcretePeaksPresenter.cpp
+++ b/qt/widgets/sliceviewer/src/ConcretePeaksPresenter.cpp
@@ -554,7 +554,11 @@ bool ConcretePeaksPresenter::addPeakAt(double plotCoordsPointX,
       boost::const_pointer_cast<Mantid::API::IPeaksWorkspace>(this->m_peaksWS);
 
   const auto frame = m_transform->getCoordinateSystem();
-  peaksWS->addPeak(position, frame);
+  try {
+    peaksWS->addPeak(position, frame);
+  } catch (const std::invalid_argument &e) {
+    g_log.warning(e.what());
+  }
 
   // Reproduce the views. Proxy representations recreated for all peaks.
   this->produceViews();