From 0f71eded01a01c6af4a475394311628014bb0311 Mon Sep 17 00:00:00 2001
From: Steven Hahn <hahnse@ornl.gov>
Date: Fri, 29 Jan 2016 19:22:26 -0500
Subject: [PATCH] Refs #11815. Wrap Poco::StringTokenizer & start fixing tests.

---
 Framework/API/src/Algorithm.cpp               | 17 ++--
 Framework/API/src/AlgorithmFactory.cpp        |  9 +-
 Framework/API/src/AlgorithmProxy.cpp          |  7 +-
 Framework/API/src/ConstraintFactory.cpp       |  2 +-
 Framework/API/src/ExperimentInfo.cpp          |  8 +-
 Framework/API/src/Expression.cpp              |  4 +-
 Framework/API/src/FileFinder.cpp              | 14 +--
 Framework/API/src/FunctionFactory.cpp         |  2 +-
 Framework/API/src/IFunction.cpp               | 13 +--
 Framework/API/src/ScriptRepositoryFactory.cpp |  2 +-
 Framework/API/src/WorkspaceHistory.cpp        |  6 +-
 .../Algorithms/src/ReadGroupsFromFile.cpp     |  3 +-
 .../src/Algorithms/PlotPeakByLogValue.cpp     |  4 +-
 .../inc/MantidDataHandling/GroupDetectors2.h  | 10 +-
 .../DataHandling/src/CheckMantidVersion.cpp   |  9 +-
 .../DataHandling/src/GroupDetectors2.cpp      | 19 ++--
 .../DataHandling/src/ISISDataArchive.cpp      |  2 +-
 Framework/DataHandling/src/LoadAscii.cpp      |  2 +-
 Framework/DataHandling/src/LoadAscii2.cpp     |  2 +-
 .../DataHandling/src/LoadNexusProcessed.cpp   |  8 +-
 Framework/DataHandling/src/LoadRKH.cpp        | 11 ++-
 Framework/DataHandling/src/LoadReflTBL.cpp    |  2 +-
 Framework/DataHandling/src/LoadSpice2D.cpp    |  2 +-
 .../src/UpdateInstrumentFromFile.cpp          |  5 +-
 .../DataHandling/test/SaveParameterFileTest.h |  2 +-
 .../inc/MantidGeometry/Instrument/Parameter.h |  3 +-
 .../Geometry/src/Instrument/FitParameter.cpp  |  4 +-
 Framework/Kernel/CMakeLists.txt               |  1 +
 .../inc/MantidKernel/PropertyWithValue.h      | 27 ++++--
 .../Kernel/inc/MantidKernel/StringTokenizer.h | 96 +++++++++++++++++++
 Framework/Kernel/src/ConfigService.cpp        |  8 +-
 Framework/Kernel/src/FacilityInfo.cpp         |  4 +-
 Framework/Kernel/src/FilterChannel.cpp        |  2 +-
 Framework/Kernel/src/Interpolation.cpp        |  4 +-
 Framework/Kernel/src/Strings.cpp              | 28 ++----
 .../src/BoxControllerSettingsAlgorithm.cpp    |  2 +-
 .../WorkflowAlgorithms/src/HFIRInstrument.cpp |  6 +-
 .../src/Muon/MuonAnalysis.cpp                 |  2 +-
 .../CustomInterfaces/src/SANSRunWindow.cpp    |  8 +-
 39 files changed, 242 insertions(+), 118 deletions(-)
 create mode 100644 Framework/Kernel/inc/MantidKernel/StringTokenizer.h
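
For reviewers: a minimal usage sketch of the new Kernel::StringTokenizer wrapper, using only the interface added below. The input string, separator and the small main() driver are made up for illustration; the wrapper simply forwards to Poco::StringTokenizer, so behaviour matches the call sites converted in this patch.

    #include "MantidKernel/StringTokenizer.h"
    #include <iostream>

    int main() {
      // Tokenize a made-up comma-separated list, trimming whitespace and
      // skipping empty tokens -- the option combination used by most of the
      // call sites touched in this patch.
      Mantid::Kernel::StringTokenizer tokens(
          " a, b,, c ", ",",
          Mantid::Kernel::StringTokenizer::TOK_TRIM |
              Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
      std::cout << tokens.count() << " tokens\n"; // prints "3 tokens"
      for (auto it = tokens.begin(); it != tokens.end(); ++it)
        std::cout << *it << "\n"; // prints "a", "b", "c"
      return 0;
    }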

diff --git a/Framework/API/src/Algorithm.cpp b/Framework/API/src/Algorithm.cpp
index 01687e11653..3bc5b1f36c4 100644
--- a/Framework/API/src/Algorithm.cpp
+++ b/Framework/API/src/Algorithm.cpp
@@ -26,7 +26,7 @@
 #include <Poco/ActiveResult.h>
 #include <Poco/NotificationCenter.h>
 #include <Poco/RWLock.h>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 #include <Poco/Void.h>
 
 #include <json/json.h>
@@ -204,9 +204,10 @@ void Algorithm::progress(double p, const std::string &msg, double estimatedTime,
 //---------------------------------------------------------------------------------------------
 /// Function to return all of the categories that contain this algorithm
 const std::vector<std::string> Algorithm::categories() const {
-  Poco::StringTokenizer tokenizer(category(), categorySeparator(),
-                                  Poco::StringTokenizer::TOK_TRIM |
-                                      Poco::StringTokenizer::TOK_IGNORE_EMPTY);
+  Mantid::Kernel::StringTokenizer tokenizer(
+      category(), categorySeparator(),
+      Mantid::Kernel::StringTokenizer::TOK_TRIM |
+          Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
 
   std::vector<std::string> res(tokenizer.begin(), tokenizer.end());
 
@@ -230,10 +231,10 @@ const std::string Algorithm::workspaceMethodName() const { return ""; }
  *workspaceMethodName attached
  */
 const std::vector<std::string> Algorithm::workspaceMethodOn() const {
-  Poco::StringTokenizer tokenizer(this->workspaceMethodOnTypes(),
-                                  WORKSPACE_TYPES_SEPARATOR,
-                                  Poco::StringTokenizer::TOK_TRIM |
-                                      Poco::StringTokenizer::TOK_IGNORE_EMPTY);
+  Mantid::Kernel::StringTokenizer tokenizer(
+      this->workspaceMethodOnTypes(), WORKSPACE_TYPES_SEPARATOR,
+      Mantid::Kernel::StringTokenizer::TOK_TRIM |
+          Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
   std::vector<std::string> res;
   res.reserve(tokenizer.count());
   for (auto iter = tokenizer.begin(); iter != tokenizer.end(); ++iter) {
diff --git a/Framework/API/src/AlgorithmFactory.cpp b/Framework/API/src/AlgorithmFactory.cpp
index 099150e852c..1c931df2d06 100644
--- a/Framework/API/src/AlgorithmFactory.cpp
+++ b/Framework/API/src/AlgorithmFactory.cpp
@@ -7,7 +7,7 @@
 #include "MantidKernel/LibraryManager.h"
 #include "MantidKernel/ConfigService.h"
 
-#include "Poco/StringTokenizer.h"
+#include "MantidKernel/StringTokenizer.h"
 
 namespace Mantid {
 namespace API {
@@ -391,9 +391,10 @@ void AlgorithmFactoryImpl::fillHiddenCategories(
     std::set<std::string> *categorySet) const {
   std::string categoryString = Kernel::ConfigService::Instance().getString(
       "algorithms.categories.hidden");
-  Poco::StringTokenizer tokenizer(categoryString, ";",
-                                  Poco::StringTokenizer::TOK_TRIM |
-                                      Poco::StringTokenizer::TOK_IGNORE_EMPTY);
+  Mantid::Kernel::StringTokenizer tokenizer(
+      categoryString, ";",
+      Mantid::Kernel::StringTokenizer::TOK_TRIM |
+          Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
   std::copy(tokenizer.begin(), tokenizer.end(),
             std::inserter(*categorySet, categorySet->end()));
 }
diff --git a/Framework/API/src/AlgorithmProxy.cpp b/Framework/API/src/AlgorithmProxy.cpp
index 7ea45e33594..39cf34afb1f 100644
--- a/Framework/API/src/AlgorithmProxy.cpp
+++ b/Framework/API/src/AlgorithmProxy.cpp
@@ -309,9 +309,10 @@ std::string AlgorithmProxy::toString() const {
 
 /// Function to return all of the categories that contain this algorithm
 const std::vector<std::string> AlgorithmProxy::categories() const {
-  Poco::StringTokenizer tokenizer(category(), categorySeparator(),
-                                  Poco::StringTokenizer::TOK_TRIM |
-                                      Poco::StringTokenizer::TOK_IGNORE_EMPTY);
+  Mantid::Kernel::StringTokenizer tokenizer(
+      category(), categorySeparator(),
+      Mantid::Kernel::StringTokenizer::TOK_TRIM |
+          Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
 
   std::vector<std::string> res(tokenizer.begin(), tokenizer.end());
 
diff --git a/Framework/API/src/ConstraintFactory.cpp b/Framework/API/src/ConstraintFactory.cpp
index 8500cc000b9..4f42deb7647 100644
--- a/Framework/API/src/ConstraintFactory.cpp
+++ b/Framework/API/src/ConstraintFactory.cpp
@@ -2,7 +2,7 @@
 #include "MantidAPI/Expression.h"
 #include "MantidAPI/IConstraint.h"
 #include "MantidKernel/LibraryManager.h"
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 
 namespace Mantid {
 namespace API {
diff --git a/Framework/API/src/ExperimentInfo.cpp b/Framework/API/src/ExperimentInfo.cpp
index 90a35d1f73f..5f510975197 100644
--- a/Framework/API/src/ExperimentInfo.cpp
+++ b/Framework/API/src/ExperimentInfo.cpp
@@ -1134,14 +1134,14 @@ void ExperimentInfo::readParameterMap(const std::string &parameterStr) {
   Geometry::ParameterMap &pmap = this->instrumentParameters();
   Instrument_const_sptr instr = this->getInstrument()->baseInstrument();
 
-  int options = Poco::StringTokenizer::TOK_IGNORE_EMPTY;
-  options += Poco::StringTokenizer::TOK_TRIM;
-  Poco::StringTokenizer splitter(parameterStr, "|", options);
+  int options = Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY;
+  options += Mantid::Kernel::StringTokenizer::TOK_TRIM;
+  Mantid::Kernel::StringTokenizer splitter(parameterStr, "|", options);
 
   auto iend = splitter.end();
   // std::string prev_name;
   for (auto itr = splitter.begin(); itr != iend; ++itr) {
-    Poco::StringTokenizer tokens(*itr, ";");
+    Mantid::Kernel::StringTokenizer tokens(*itr, ";");
     if (tokens.count() < 4)
       continue;
     std::string comp_name = tokens[0];
diff --git a/Framework/API/src/Expression.cpp b/Framework/API/src/Expression.cpp
index a3d59245d94..6402b9a3c90 100644
--- a/Framework/API/src/Expression.cpp
+++ b/Framework/API/src/Expression.cpp
@@ -4,12 +4,12 @@
 
 #include "MantidAPI/Expression.h"
 
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 
 namespace Mantid {
 namespace API {
 
-typedef Poco::StringTokenizer tokenizer;
+typedef Mantid::Kernel::StringTokenizer tokenizer;
 
 const std::string DEFAULT_OPS_STR[] = {";", ",", "=", "== != > < <= >=",
                                        "&& || ^^", "+ -", "* /", "^"};
diff --git a/Framework/API/src/FileFinder.cpp b/Framework/API/src/FileFinder.cpp
index 2c77a6ef042..4df913f499c 100644
--- a/Framework/API/src/FileFinder.cpp
+++ b/Framework/API/src/FileFinder.cpp
@@ -14,7 +14,7 @@
 
 #include <Poco/Path.h>
 #include <Poco/File.h>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 #include <Poco/Exception.h>
 #include <boost/regex.hpp>
 #include <boost/lexical_cast.hpp>
@@ -597,9 +597,9 @@ FileFinderImpl::findRuns(const std::string &hintstr) const {
   std::string hint = Kernel::Strings::strip(hintstr);
   g_log.debug() << "findRuns hint = " << hint << "\n";
   std::vector<std::string> res;
-  Poco::StringTokenizer hints(hint, ",",
-                              Poco::StringTokenizer::TOK_TRIM |
-                                  Poco::StringTokenizer::TOK_IGNORE_EMPTY);
+  Mantid::Kernel::StringTokenizer hints(
+      hint, ",", Mantid::Kernel::StringTokenizer::TOK_TRIM |
+                     Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
   auto h = hints.begin();
 
   for (; h != hints.end(); ++h) {
@@ -616,9 +616,9 @@ FileFinderImpl::findRuns(const std::string &hintstr) const {
       fileSuspected = true;
     }
 
-    Poco::StringTokenizer range(*h, "-",
-                                Poco::StringTokenizer::TOK_TRIM |
-                                    Poco::StringTokenizer::TOK_IGNORE_EMPTY);
+    Mantid::Kernel::StringTokenizer range(
+        *h, "-", Mantid::Kernel::StringTokenizer::TOK_TRIM |
+                     Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
     if ((range.count() > 2) && (!fileSuspected)) {
       throw std::invalid_argument("Malformed range of runs: " + *h);
     } else if ((range.count() == 2) && (!fileSuspected)) {
diff --git a/Framework/API/src/FunctionFactory.cpp b/Framework/API/src/FunctionFactory.cpp
index c44e9ec25be..4a7c7ba8361 100644
--- a/Framework/API/src/FunctionFactory.cpp
+++ b/Framework/API/src/FunctionFactory.cpp
@@ -8,7 +8,7 @@
 #include "MantidAPI/Workspace.h"
 #include "MantidAPI/AnalysisDataService.h"
 #include "MantidKernel/LibraryManager.h"
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 #include <sstream>
 
 namespace Mantid {
diff --git a/Framework/API/src/IFunction.cpp b/Framework/API/src/IFunction.cpp
index e4ae61a0176..cbbad9370db 100644
--- a/Framework/API/src/IFunction.cpp
+++ b/Framework/API/src/IFunction.cpp
@@ -24,7 +24,7 @@
 
 #include <boost/lexical_cast.hpp>
 
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 
 #include <limits>
 #include <sstream>
@@ -253,9 +253,10 @@ void IFunction::setHandler(FunctionHandler *handler) {
 
 /// Function to return all of the categories that contain this function
 const std::vector<std::string> IFunction::categories() const {
-  Poco::StringTokenizer tokenizer(category(), categorySeparator(),
-                                  Poco::StringTokenizer::TOK_TRIM |
-                                      Poco::StringTokenizer::TOK_IGNORE_EMPTY);
+  Mantid::Kernel::StringTokenizer tokenizer(
+      category(), categorySeparator(),
+      Mantid::Kernel::StringTokenizer::TOK_TRIM |
+          Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
   return std::vector<std::string>(tokenizer.begin(), tokenizer.end());
 }
 
@@ -572,8 +573,8 @@ protected:
         m_value.erase(m_value.size() - 1);
       }
     }
-    Poco::StringTokenizer tokenizer(m_value, ",",
-                                    Poco::StringTokenizer::TOK_TRIM);
+    Mantid::Kernel::StringTokenizer tokenizer(
+        m_value, ",", Mantid::Kernel::StringTokenizer::TOK_TRIM);
     v.resize(tokenizer.count());
     for (size_t i = 0; i < v.size(); ++i) {
       v[i] = boost::lexical_cast<double>(tokenizer[i]);
diff --git a/Framework/API/src/ScriptRepositoryFactory.cpp b/Framework/API/src/ScriptRepositoryFactory.cpp
index 6fce2310fa7..c9e4a6a7890 100644
--- a/Framework/API/src/ScriptRepositoryFactory.cpp
+++ b/Framework/API/src/ScriptRepositoryFactory.cpp
@@ -1,7 +1,7 @@
 #include "MantidAPI/ScriptRepositoryFactory.h"
 #include "MantidAPI/ScriptRepository.h"
 #include "MantidKernel/LibraryManager.h"
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 #include <sstream>
 
 namespace Mantid {
diff --git a/Framework/API/src/WorkspaceHistory.cpp b/Framework/API/src/WorkspaceHistory.cpp
index 6317194090d..9499348becf 100644
--- a/Framework/API/src/WorkspaceHistory.cpp
+++ b/Framework/API/src/WorkspaceHistory.cpp
@@ -201,7 +201,8 @@ void WorkspaceHistory::saveNexus(::NeXus::File *file) const {
  */
 void getWordsInString(const std::string &words3, std::string &w1,
                       std::string &w2, std::string &w3) {
-  Poco::StringTokenizer data(words3, " ", Poco::StringTokenizer::TOK_TRIM);
+  Mantid::Kernel::StringTokenizer data(
+      words3, " ", Mantid::Kernel::StringTokenizer::TOK_TRIM);
   if (data.count() != 3)
     throw std::out_of_range("Algorithm list line " + words3 +
                             " is not of the correct format\n");
@@ -224,7 +225,8 @@ void getWordsInString(const std::string &words3, std::string &w1,
  */
 void getWordsInString(const std::string &words4, std::string &w1,
                       std::string &w2, std::string &w3, std::string &w4) {
-  Poco::StringTokenizer data(words4, " ", Poco::StringTokenizer::TOK_TRIM);
+  Mantid::Kernel::StringTokenizer data(
+      words4, " ", Mantid::Kernel::StringTokenizer::TOK_TRIM);
   if (data.count() != 4)
     throw std::out_of_range("Algorithm list line " + words4 +
                             " is not of the correct format\n");
diff --git a/Framework/Algorithms/src/ReadGroupsFromFile.cpp b/Framework/Algorithms/src/ReadGroupsFromFile.cpp
index 7d577056aa5..ea16edbe7fa 100644
--- a/Framework/Algorithms/src/ReadGroupsFromFile.cpp
+++ b/Framework/Algorithms/src/ReadGroupsFromFile.cpp
@@ -222,7 +222,8 @@ void ReadGroupsFromFile::readXMLGroupingFile(const std::string &filename) {
 
     std::string ids = group->getAttribute("val");
 
-    Poco::StringTokenizer data(ids, ",", Poco::StringTokenizer::TOK_TRIM);
+    Mantid::Kernel::StringTokenizer data(
+        ids, ",", Mantid::Kernel::StringTokenizer::TOK_TRIM);
 
     if (data.begin() != data.end()) {
       for (auto it = data.begin(); it != data.end(); ++it) {
diff --git a/Framework/CurveFitting/src/Algorithms/PlotPeakByLogValue.cpp b/Framework/CurveFitting/src/Algorithms/PlotPeakByLogValue.cpp
index 7ef71507ce4..1bd9858b189 100644
--- a/Framework/CurveFitting/src/Algorithms/PlotPeakByLogValue.cpp
+++ b/Framework/CurveFitting/src/Algorithms/PlotPeakByLogValue.cpp
@@ -6,7 +6,7 @@
 #include <fstream>
 #include <sstream>
 #include <algorithm>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 #include <boost/lexical_cast.hpp>
 #include <boost/algorithm/string/replace.hpp>
 
@@ -518,7 +518,7 @@ PlotPeakByLogValue::makeNames() const {
   double start = 0;
   double end = 0;
 
-  typedef Poco::StringTokenizer tokenizer;
+  typedef Mantid::Kernel::StringTokenizer tokenizer;
   tokenizer names(inputList, ";",
                   tokenizer::TOK_IGNORE_EMPTY | tokenizer::TOK_TRIM);
   for (auto it = names.begin(); it != names.end(); ++it) {
diff --git a/Framework/DataHandling/inc/MantidDataHandling/GroupDetectors2.h b/Framework/DataHandling/inc/MantidDataHandling/GroupDetectors2.h
index 31d65508540..2325d4ebf6a 100644
--- a/Framework/DataHandling/inc/MantidDataHandling/GroupDetectors2.h
+++ b/Framework/DataHandling/inc/MantidDataHandling/GroupDetectors2.h
@@ -144,8 +144,10 @@ private:
     RangeHelper(){};
     /// give an enum from poco a better name here
     enum {
-      IGNORE_SPACES = Poco::StringTokenizer::TOK_TRIM ///< equal to
-      /// Poco::StringTokenizer::TOK_TRIM but
+      IGNORE_SPACES =
+          Mantid::Kernel::StringTokenizer::TOK_TRIM |
+          Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY ///< equal to
+      /// Mantid::Kernel::StringTokenizer::TOK_TRIM | TOK_IGNORE_EMPTY but
       /// saves some typing
     };
   };
@@ -239,8 +241,8 @@ private:
     /// spectrum number to the this
     EMPTY_LINE = 1001 - INT_MAX, ///< when reading from the input file this
     /// value means that we found any empty line
-    IGNORE_SPACES = Poco::StringTokenizer::TOK_TRIM ///< equal to
-    /// Poco::StringTokenizer::TOK_TRIM but
+    IGNORE_SPACES = Mantid::Kernel::StringTokenizer::TOK_TRIM ///< equal to
+    /// Mantid::Kernel::StringTokenizer::TOK_TRIM but
     /// saves some typing
   };
 
diff --git a/Framework/DataHandling/src/CheckMantidVersion.cpp b/Framework/DataHandling/src/CheckMantidVersion.cpp
index c64067efecf..75edac75394 100644
--- a/Framework/DataHandling/src/CheckMantidVersion.cpp
+++ b/Framework/DataHandling/src/CheckMantidVersion.cpp
@@ -6,7 +6,7 @@
 #include <Poco/DateTimeFormatter.h>
 #include <Poco/DateTimeFormat.h>
 #include <Poco/DateTimeParser.h>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 
 // jsoncpp
 #include <json/json.h>
@@ -175,9 +175,10 @@ CheckMantidVersion::cleanVersionTag(const std::string &versionTag) const {
 std::vector<int>
 CheckMantidVersion::splitVersionString(const std::string &versionString) const {
   std::vector<int> retVal;
-  Poco::StringTokenizer tokenizer(versionString, ".",
-                                  Poco::StringTokenizer::TOK_TRIM |
-                                      Poco::StringTokenizer::TOK_IGNORE_EMPTY);
+  Mantid::Kernel::StringTokenizer tokenizer(
+      versionString, ".",
+      Mantid::Kernel::StringTokenizer::TOK_TRIM |
+          Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
   auto h = tokenizer.begin();
 
   for (; h != tokenizer.end(); ++h) {
diff --git a/Framework/DataHandling/src/GroupDetectors2.cpp b/Framework/DataHandling/src/GroupDetectors2.cpp
index 5fb850d4c53..7b469e40b3e 100644
--- a/Framework/DataHandling/src/GroupDetectors2.cpp
+++ b/Framework/DataHandling/src/GroupDetectors2.cpp
@@ -732,10 +732,13 @@ void GroupDetectors2::processMatrixWorkspace(
 */
 int GroupDetectors2::readInt(std::string line) {
   // remove comments and white space (TOK_TRIM)
-  Poco::StringTokenizer dataComment(line, "#", Poco::StringTokenizer::TOK_TRIM);
+  Mantid::Kernel::StringTokenizer dataComment(
+      line, "#", Mantid::Kernel::StringTokenizer::TOK_TRIM);
   if (dataComment.begin() != dataComment.end()) {
-    Poco::StringTokenizer data(*(dataComment.begin()), " ",
-                               Poco::StringTokenizer::TOK_TRIM);
+    Mantid::Kernel::StringTokenizer data(
+        *(dataComment.begin()), " ",
+        Mantid::Kernel::StringTokenizer::TOK_TRIM |
+            Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
     if (data.count() == 1) {
       if (!data[0].empty()) {
         try {
@@ -855,7 +858,7 @@ void GroupDetectors2::readSpectraIndexes(std::string line,
                                          std::vector<int64_t> &unUsedSpec,
                                          std::string seperator) {
   // remove comments and white space
-  Poco::StringTokenizer dataComment(line, seperator, IGNORE_SPACES);
+  Mantid::Kernel::StringTokenizer dataComment(line, seperator, IGNORE_SPACES);
   for (auto itr = dataComment.begin(); itr != dataComment.end(); ++itr) {
     std::vector<size_t> specNums;
     specNums.reserve(output.capacity());
@@ -1252,12 +1255,13 @@ void GroupDetectors2::RangeHelper::getList(const std::string &line,
                       // function
     return;
   }
-  Poco::StringTokenizer ranges(line, "-");
+  Mantid::Kernel::StringTokenizer ranges(line, "-");
 
   try {
     size_t loop = 0;
     do {
-      Poco::StringTokenizer beforeHyphen(ranges[loop], " ", IGNORE_SPACES);
+      Mantid::Kernel::StringTokenizer beforeHyphen(ranges[loop], " ",
+                                                   IGNORE_SPACES);
       auto readPostion = beforeHyphen.begin();
       if (readPostion == beforeHyphen.end()) {
         throw std::invalid_argument("'-' found at the start of a list, can't "
@@ -1273,7 +1277,8 @@ void GroupDetectors2::RangeHelper::getList(const std::string &line,
         break;
       }
 
-      Poco::StringTokenizer afterHyphen(ranges[loop + 1], " ", IGNORE_SPACES);
+      Mantid::Kernel::StringTokenizer afterHyphen(ranges[loop + 1], " ",
+                                                  IGNORE_SPACES);
       readPostion = afterHyphen.begin();
       if (readPostion == afterHyphen.end()) {
         throw std::invalid_argument("A '-' follows straight after another '-', "
diff --git a/Framework/DataHandling/src/ISISDataArchive.cpp b/Framework/DataHandling/src/ISISDataArchive.cpp
index 7bea0f139c1..3bbacf6e1c3 100644
--- a/Framework/DataHandling/src/ISISDataArchive.cpp
+++ b/Framework/DataHandling/src/ISISDataArchive.cpp
@@ -8,7 +8,7 @@
 
 #include <Poco/Path.h>
 #include <Poco/File.h>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 #include <Poco/Exception.h>
 
 #include <sstream>
diff --git a/Framework/DataHandling/src/LoadAscii.cpp b/Framework/DataHandling/src/LoadAscii.cpp
index ee39464e0c9..d2a9fa9fd07 100644
--- a/Framework/DataHandling/src/LoadAscii.cpp
+++ b/Framework/DataHandling/src/LoadAscii.cpp
@@ -12,7 +12,7 @@
 #include <fstream>
 
 #include <boost/tokenizer.hpp>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 // String utilities
 #include <boost/algorithm/string.hpp>
 
diff --git a/Framework/DataHandling/src/LoadAscii2.cpp b/Framework/DataHandling/src/LoadAscii2.cpp
index 284ed879bc6..ea97c6347e5 100644
--- a/Framework/DataHandling/src/LoadAscii2.cpp
+++ b/Framework/DataHandling/src/LoadAscii2.cpp
@@ -12,7 +12,7 @@
 #include <fstream>
 
 #include <boost/tokenizer.hpp>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 // String utilities
 #include <boost/algorithm/string.hpp>
 #include <boost/regex.hpp>
diff --git a/Framework/DataHandling/src/LoadNexusProcessed.cpp b/Framework/DataHandling/src/LoadNexusProcessed.cpp
index 556d63c54c7..d3e0af2882b 100644
--- a/Framework/DataHandling/src/LoadNexusProcessed.cpp
+++ b/Framework/DataHandling/src/LoadNexusProcessed.cpp
@@ -29,7 +29,7 @@
 #include <boost/lexical_cast.hpp>
 #include <boost/shared_array.hpp>
 
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 
 #include <nexus/NeXusException.hpp>
 
@@ -1762,7 +1762,8 @@ bool UDlesserExecCount(NXClassInfo elem1, NXClassInfo elem2) {
 void LoadNexusProcessed::getWordsInString(const std::string &words3,
                                           std::string &w1, std::string &w2,
                                           std::string &w3) {
-  Poco::StringTokenizer data(words3, " ", Poco::StringTokenizer::TOK_TRIM);
+  Mantid::Kernel::StringTokenizer data(
+      words3, " ", Mantid::Kernel::StringTokenizer::TOK_TRIM);
   if (data.count() != 3) {
     g_log.warning() << "Algorithm list line " + words3 +
                            " is not of the correct format\n";
@@ -1788,7 +1789,8 @@ void LoadNexusProcessed::getWordsInString(const std::string &words3,
 void LoadNexusProcessed::getWordsInString(const std::string &words4,
                                           std::string &w1, std::string &w2,
                                           std::string &w3, std::string &w4) {
-  Poco::StringTokenizer data(words4, " ", Poco::StringTokenizer::TOK_TRIM);
+  Mantid::Kernel::StringTokenizer data(
+      words4, " ", Mantid::Kernel::StringTokenizer::TOK_TRIM);
   if (data.count() != 4) {
     g_log.warning() << "Algorithm list line " + words4 +
                            " is not of the correct format\n";
diff --git a/Framework/DataHandling/src/LoadRKH.cpp b/Framework/DataHandling/src/LoadRKH.cpp
index d737c518fdb..8ee92fcf67d 100644
--- a/Framework/DataHandling/src/LoadRKH.cpp
+++ b/Framework/DataHandling/src/LoadRKH.cpp
@@ -15,7 +15,7 @@
 #include <boost/date_time/date_parsing.hpp>
 #include <boost/lexical_cast.hpp>
 #include <boost/algorithm/string.hpp>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 
 #include <istream>
 
@@ -411,8 +411,9 @@ Progress LoadRKH::read2DHeader(const std::string &initalLine,
   if (fileLine.size() < 5) {
     std::getline(m_fileIn, fileLine);
   }
-  Poco::StringTokenizer wsDimensions(fileLine, " ",
-                                     Poco::StringTokenizer::TOK_TRIM);
+  Mantid::Kernel::StringTokenizer wsDimensions(
+      fileLine, " ", Mantid::Kernel::StringTokenizer::TOK_TRIM |
+                         Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
   if (wsDimensions.count() < 2) {
     throw Exception::NotFoundError("Input file", "dimensions");
   }
@@ -460,7 +461,9 @@ void LoadRKH::readNumEntrys(const int nEntries, MantidVec &output) {
 */
 const std::string LoadRKH::readUnit(const std::string &line) {
   // split the line into words
-  const Poco::StringTokenizer codes(line, " ", Poco::StringTokenizer::TOK_TRIM);
+  const Mantid::Kernel::StringTokenizer codes(
+      line, " ", Mantid::Kernel::StringTokenizer::TOK_TRIM |
+                     Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
   if (codes.count() < 1) {
     return "C++ no unit found";
   }
diff --git a/Framework/DataHandling/src/LoadReflTBL.cpp b/Framework/DataHandling/src/LoadReflTBL.cpp
index 260e726dfd8..c776e0ed0b9 100644
--- a/Framework/DataHandling/src/LoadReflTBL.cpp
+++ b/Framework/DataHandling/src/LoadReflTBL.cpp
@@ -9,7 +9,7 @@
 #include <fstream>
 
 #include <boost/tokenizer.hpp>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 // String utilities
 #include <boost/algorithm/string.hpp>
 
diff --git a/Framework/DataHandling/src/LoadSpice2D.cpp b/Framework/DataHandling/src/LoadSpice2D.cpp
index 4312bfd53be..3d17fe96951 100644
--- a/Framework/DataHandling/src/LoadSpice2D.cpp
+++ b/Framework/DataHandling/src/LoadSpice2D.cpp
@@ -15,7 +15,7 @@
 #include <boost/regex.hpp>
 #include <boost/shared_array.hpp>
 #include <Poco/Path.h>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 #include <Poco/DOM/DOMParser.h>
 #include <Poco/DOM/Document.h>
 #include <Poco/DOM/Element.h>
diff --git a/Framework/DataHandling/src/UpdateInstrumentFromFile.cpp b/Framework/DataHandling/src/UpdateInstrumentFromFile.cpp
index 04f7103e3cb..82ea9cd7f3f 100644
--- a/Framework/DataHandling/src/UpdateInstrumentFromFile.cpp
+++ b/Framework/DataHandling/src/UpdateInstrumentFromFile.cpp
@@ -17,7 +17,7 @@
 #include <boost/scoped_ptr.hpp>
 #include <boost/algorithm/string/predicate.hpp>
 #include <nexus/NeXusException.hpp>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 
 #include <fstream>
 
@@ -311,7 +311,8 @@ bool UpdateInstrumentFromFile::parseAsciiHeader(
                                 "property is empty, cannot interpret columns");
   }
 
-  Poco::StringTokenizer splitter(header, ",", Poco::StringTokenizer::TOK_TRIM);
+  Mantid::Kernel::StringTokenizer splitter(
+      header, ",", Mantid::Kernel::StringTokenizer::TOK_TRIM);
   headerInfo.colCount = splitter.count();
   auto it =
       splitter.begin(); // First column must be spectrum number or detector ID
diff --git a/Framework/DataHandling/test/SaveParameterFileTest.h b/Framework/DataHandling/test/SaveParameterFileTest.h
index 29cd23889e5..6ce13bf6c08 100644
--- a/Framework/DataHandling/test/SaveParameterFileTest.h
+++ b/Framework/DataHandling/test/SaveParameterFileTest.h
@@ -136,7 +136,7 @@ public:
         param->value<FitParameter>();
 
     // Info about fitting parameter is in string value, see FitParameter class
-    typedef Poco::StringTokenizer tokenizer;
+    typedef Mantid::Kernel::StringTokenizer tokenizer;
     tokenizer values(value, ",", tokenizer::TOK_TRIM);
     TS_ASSERT_EQUALS(fitParam.getFormula(), values[7]);
     TS_ASSERT_EQUALS(fitParam.getFunction(), values[1]);
diff --git a/Framework/Geometry/inc/MantidGeometry/Instrument/Parameter.h b/Framework/Geometry/inc/MantidGeometry/Instrument/Parameter.h
index 07507db2714..248070ef7be 100644
--- a/Framework/Geometry/inc/MantidGeometry/Instrument/Parameter.h
+++ b/Framework/Geometry/inc/MantidGeometry/Instrument/Parameter.h
@@ -212,7 +212,8 @@ template <class Type> std::string ParameterType<Type>::asString() const {
  */
 template <class Type>
 void ParameterType<Type>::fromString(const std::string &value) {
-  std::istringstream istr(value);
+  std::string newvalue(value.begin(), value.end() - 1);
+  std::istringstream istr(newvalue);
   istr >> m_value;
 }
 
diff --git a/Framework/Geometry/src/Instrument/FitParameter.cpp b/Framework/Geometry/src/Instrument/FitParameter.cpp
index ff04884990c..a8f5996da8b 100644
--- a/Framework/Geometry/src/Instrument/FitParameter.cpp
+++ b/Framework/Geometry/src/Instrument/FitParameter.cpp
@@ -4,7 +4,7 @@
 #include "MantidGeometry/Instrument/FitParameter.h"
 #include "MantidGeometry/Instrument/Parameter.h"
 #include "MantidGeometry/Instrument/ParameterFactory.h"
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 #include "MantidGeometry/muParser_Silent.h"
 
 namespace Mantid {
@@ -162,7 +162,7 @@ std::ostream &operator<<(std::ostream &os, const FitParameter &f) {
 */
 std::istream &operator>>(std::istream &in, FitParameter &f) {
 
-  typedef Poco::StringTokenizer tokenizer;
+  typedef Mantid::Kernel::StringTokenizer tokenizer;
   std::string str;
   getline(in, str);
   tokenizer values(str, ",", tokenizer::TOK_TRIM);
diff --git a/Framework/Kernel/CMakeLists.txt b/Framework/Kernel/CMakeLists.txt
index 994539535d5..9931bd15a68 100644
--- a/Framework/Kernel/CMakeLists.txt
+++ b/Framework/Kernel/CMakeLists.txt
@@ -244,6 +244,7 @@ set ( INC_FILES
 	inc/MantidKernel/StdoutChannel.h
 	inc/MantidKernel/StringContainsValidator.h
 	inc/MantidKernel/Strings.h
+	inc/MantidKernel/StringTokenizer.h
 	inc/MantidKernel/System.h
 	inc/MantidKernel/Task.h
 	inc/MantidKernel/TestChannel.h
diff --git a/Framework/Kernel/inc/MantidKernel/PropertyWithValue.h b/Framework/Kernel/inc/MantidKernel/PropertyWithValue.h
index cef85abbfab..c0a3af55391 100644
--- a/Framework/Kernel/inc/MantidKernel/PropertyWithValue.h
+++ b/Framework/Kernel/inc/MantidKernel/PropertyWithValue.h
@@ -15,7 +15,7 @@
 #include <boost/shared_ptr.hpp>
 #endif
 
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 #include <vector>
 #include "MantidKernel/IPropertySettings.h"
 
@@ -149,7 +149,7 @@ void toValue(const std::string &, boost::shared_ptr<T> &) {
 template <typename T>
 void toValue(const std::string &strvalue, std::vector<T> &value) {
   // Split up comma-separated properties
-  typedef Poco::StringTokenizer tokenizer;
+  typedef Mantid::Kernel::StringTokenizer tokenizer;
   tokenizer values(strvalue, ",",
                    tokenizer::TOK_IGNORE_EMPTY | tokenizer::TOK_TRIM);
 
@@ -165,7 +165,7 @@ template <typename T>
 void toValue(const std::string &strvalue, std::vector<std::vector<T>> &value,
              const std::string &outerDelimiter = ",",
              const std::string &innerDelimiter = "+") {
-  typedef Poco::StringTokenizer tokenizer;
+  typedef Mantid::Kernel::StringTokenizer tokenizer;
   tokenizer tokens(strvalue, outerDelimiter,
                    tokenizer::TOK_IGNORE_EMPTY | tokenizer::TOK_TRIM);
 
@@ -199,7 +199,7 @@ template <typename T> T extractToValueVector(const std::string &strvalue) {
   template <>                                                                  \
   inline void toValue<type>(const std::string &strvalue,                       \
                             std::vector<type> &value) {                        \
-    typedef Poco::StringTokenizer tokenizer;                                   \
+    typedef Mantid::Kernel::StringTokenizer tokenizer;                         \
     tokenizer values(strvalue, ",",                                            \
                      tokenizer::TOK_IGNORE_EMPTY | tokenizer::TOK_TRIM);       \
     value.clear();                                                             \
@@ -213,9 +213,22 @@ PROPERTYWITHVALUE_TOVALUE(int)
 PROPERTYWITHVALUE_TOVALUE(long)
 PROPERTYWITHVALUE_TOVALUE(uint32_t)
 PROPERTYWITHVALUE_TOVALUE(uint64_t)
-#if defined(__APPLE__)
-PROPERTYWITHVALUE_TOVALUE(unsigned long);
-#endif
+//#if defined(__APPLE__)
+// PROPERTYWITHVALUE_TOVALUE(unsigned long);
+//#endif
+
+template <>
+inline void toValue<unsigned long>(const std::string &strvalue,
+                                   std::vector<unsigned long> &value) {
+  typedef Mantid::Kernel::StringTokenizer tokenizer;
+  tokenizer values(strvalue, ",",
+                   tokenizer::TOK_IGNORE_EMPTY | tokenizer::TOK_TRIM);
+  value.clear();
+  value.reserve(values.count());
+  for (tokenizer::Iterator it = values.begin(); it != values.end(); ++it) {
+    appendValue(*it, value);
+  }
+}
 
 // Clear up the namespace
 #undef PROPERTYWITHVALUE_TOVALUE
diff --git a/Framework/Kernel/inc/MantidKernel/StringTokenizer.h b/Framework/Kernel/inc/MantidKernel/StringTokenizer.h
new file mode 100644
index 00000000000..ac3b574da1a
--- /dev/null
+++ b/Framework/Kernel/inc/MantidKernel/StringTokenizer.h
@@ -0,0 +1,96 @@
+//
+//  StringTokenizer.h
+//  Mantid
+//
+//  Created by Hahn, Steven E. on 1/29/16.
+//
+//
+
+#ifndef StringTokenizer_h
+#define StringTokenizer_h
+
+#include <Poco/StringTokenizer.h>
+#include "MantidKernel/make_unique.h"
+#include <cstddef>
+#include <memory>
+#include <string>
+
+namespace Mantid {
+namespace Kernel {
+
+/// A simple tokenizer that splits a string into tokens, which are
+/// separated by separator characters. An iterator is used to iterate
+/// over all tokens.
+class StringTokenizer {
+public:
+  enum Options {
+    TOK_IGNORE_EMPTY = 1, ///< ignore empty tokens
+    TOK_TRIM = 2 ///< remove leading and trailing whitespace from tokens
+  };
+
+  typedef Poco::StringTokenizer::TokenVec TokenVec;
+  typedef Poco::StringTokenizer::Iterator Iterator;
+
+  /// Splits the given string into tokens. The tokens are expected to be
+  /// separated by one of the separator characters given in separators.
+  /// Additionally, options can be specified:
+  ///   * TOK_IGNORE_EMPTY: empty tokens are ignored
+  ///   * TOK_TRIM: trailing and leading whitespace is removed from tokens.
+  StringTokenizer(const std::string &str, const std::string &separators,
+                  int options = 0) {
+    m_tokenizer = Mantid::Kernel::make_unique<Poco::StringTokenizer>(
+        str, separators, options);
+  }
+
+  /// Destroys the tokenizer.
+  ~StringTokenizer() = default;
+
+  /// Returns an iterator pointing to the first token.
+  Iterator begin() const { return m_tokenizer->begin(); }
+  /// Returns an iterator pointing one past the last token.
+  Iterator end() const { return m_tokenizer->end(); }
+
+  /// Returns a const reference to the index'th token.
+  /// Throws a RangeException if the index is out of range.
+  const std::string &operator[](std::size_t index) const {
+    return (*m_tokenizer)[index];
+  }
+
+  /// Returns a reference to the index'th token.
+  /// Throws a RangeException if the index is out of range.
+  std::string &operator[](std::size_t index) { return (*m_tokenizer)[index]; }
+
+  /// Returns true if the token exists, false otherwise.
+  bool has(const std::string &token) const { return m_tokenizer->has(token); }
+
+  /// Returns the index of the first occurrence of the token
+  /// starting at position pos.
+  /// Throws a NotFoundException if the token is not found.
+  std::size_t find(const std::string &token, std::size_t pos = 0) const {
+    return m_tokenizer->find(token, pos);
+  }
+
+  /// Starting at position pos, replaces all subsequent tokens having value
+  /// equal to oldToken with newToken.
+  /// Returns the number of modified tokens.
+  std::size_t replace(const std::string &oldToken, const std::string &newToken,
+                      std::size_t pos = 0) {
+    return m_tokenizer->replace(oldToken, newToken, pos);
+  }
+
+  /// Returns the total number of tokens.
+  std::size_t count() const { return m_tokenizer->count(); }
+
+  /// Returns the number of tokens equal to the specified token.
+  std::size_t count(const std::string &token) const {
+    return m_tokenizer->count(token);
+  }
+
+private:
+  std::unique_ptr<Poco::StringTokenizer> m_tokenizer;
+};
+
+} // namespace Kernel
+} // namespace Mantid
+
+#endif /* StringTokenizer_h */
diff --git a/Framework/Kernel/src/ConfigService.cpp b/Framework/Kernel/src/ConfigService.cpp
index 71f6ee254fa..449132fa632 100644
--- a/Framework/Kernel/src/ConfigService.cpp
+++ b/Framework/Kernel/src/ConfigService.cpp
@@ -19,7 +19,7 @@
 #include <Poco/LoggingFactory.h>
 #include <Poco/Path.h>
 #include <Poco/File.h>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 #include <Poco/DOM/DOMParser.h>
 #include <Poco/DOM/Document.h>
 #include <Poco/DOM/NodeList.h>
@@ -78,11 +78,11 @@ void splitPath(const std::string &path, std::vector<std::string> &splitted) {
     return;
   }
 
-  int options =
-      Poco::StringTokenizer::TOK_TRIM + Poco::StringTokenizer::TOK_IGNORE_EMPTY;
+  int options = Mantid::Kernel::StringTokenizer::TOK_TRIM +
+                Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY;
 
   splitted.clear();
-  Poco::StringTokenizer tokenizer(path, ";,", options);
+  Mantid::Kernel::StringTokenizer tokenizer(path, ";,", options);
   auto iend = tokenizer.end();
   splitted.reserve(tokenizer.count());
   for (auto itr = tokenizer.begin(); itr != iend; ++itr) {
diff --git a/Framework/Kernel/src/FacilityInfo.cpp b/Framework/Kernel/src/FacilityInfo.cpp
index e652d906647..8740399e1c4 100644
--- a/Framework/Kernel/src/FacilityInfo.cpp
+++ b/Framework/Kernel/src/FacilityInfo.cpp
@@ -13,7 +13,7 @@
 
 #include <Poco/DOM/Element.h>
 #include <Poco/DOM/NodeList.h>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 
 using Poco::XML::Element;
 
@@ -71,7 +71,7 @@ void FacilityInfo::fillExtensions(const Poco::XML::Element *elem) {
     g_log.error("No file extensions defined");
     throw std::runtime_error("No file extensions defined");
   }
-  typedef Poco::StringTokenizer tokenizer;
+  typedef Mantid::Kernel::StringTokenizer tokenizer;
   tokenizer exts(extsStr, ",",
                  tokenizer::TOK_IGNORE_EMPTY | tokenizer::TOK_TRIM);
   for (auto it = exts.begin(); it != exts.end(); ++it) {
diff --git a/Framework/Kernel/src/FilterChannel.cpp b/Framework/Kernel/src/FilterChannel.cpp
index c6dab197e19..2986b221165 100644
--- a/Framework/Kernel/src/FilterChannel.cpp
+++ b/Framework/Kernel/src/FilterChannel.cpp
@@ -2,7 +2,7 @@
 #include "MantidKernel/FilterChannel.h"
 
 #include <Poco/LoggingRegistry.h>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 #include <Poco/Message.h>
 
 namespace Poco {
diff --git a/Framework/Kernel/src/Interpolation.cpp b/Framework/Kernel/src/Interpolation.cpp
index 5ccaa452831..1307286d3bb 100644
--- a/Framework/Kernel/src/Interpolation.cpp
+++ b/Framework/Kernel/src/Interpolation.cpp
@@ -1,7 +1,7 @@
 #include "MantidKernel/Interpolation.h"
 #include "MantidKernel/Logger.h"
 #include "MantidKernel/UnitFactory.h"
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 
 namespace Mantid {
 namespace Kernel {
@@ -170,7 +170,7 @@ std::ostream &operator<<(std::ostream &os, const Interpolation &f) {
 */
 std::istream &operator>>(std::istream &in, Interpolation &f) {
 
-  typedef Poco::StringTokenizer tokenizer;
+  typedef Mantid::Kernel::StringTokenizer tokenizer;
   std::string str;
   getline(in, str);
   tokenizer values(str, ";", tokenizer::TOK_TRIM);
diff --git a/Framework/Kernel/src/Strings.cpp b/Framework/Kernel/src/Strings.cpp
index 74e8eb569e9..3dc65243628 100644
--- a/Framework/Kernel/src/Strings.cpp
+++ b/Framework/Kernel/src/Strings.cpp
@@ -1,7 +1,7 @@
 #include "MantidKernel/Strings.h"
 #include "MantidKernel/UnitLabel.h"
+#include "MantidKernel/StringTokenizer.h"
 
-#include <Poco/StringTokenizer.h>
 #include <Poco/Path.h>
 
 #include <boost/algorithm/string.hpp>
@@ -436,11 +436,12 @@ std::map<std::string, std::string>
 splitToKeyValues(const std::string &input, const std::string &keyValSep,
                  const std::string &listSep) {
   std::map<std::string, std::string> keyValues;
-  const int splitOptions =
-      Poco::StringTokenizer::TOK_IGNORE_EMPTY + Poco::StringTokenizer::TOK_TRIM;
-  Poco::StringTokenizer listSplitter(input, listSep);
+  const int splitOptions = Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY +
+                           Mantid::Kernel::StringTokenizer::TOK_TRIM;
+  Mantid::Kernel::StringTokenizer listSplitter(input, listSep);
   for (auto iter = listSplitter.begin(); iter != listSplitter.end(); ++iter) {
-    Poco::StringTokenizer keyValSplitter(*iter, keyValSep, splitOptions);
+    Mantid::Kernel::StringTokenizer keyValSplitter(*iter, keyValSep,
+                                                   splitOptions);
     if (keyValSplitter.count() == 2) {
       keyValues[keyValSplitter[0]] = keyValSplitter[1];
     }
@@ -1045,24 +1046,14 @@ int isMember(const std::vector<std::string> &group,
  */
 std::vector<int> parseRange(const std::string &str, const std::string &elemSep,
                             const std::string &rangeSep) {
-  typedef Poco::StringTokenizer Tokenizer;
+  typedef Mantid::Kernel::StringTokenizer Tokenizer;
 
   boost::shared_ptr<Tokenizer> elements;
 
   if (elemSep.find(' ') != std::string::npos) {
-    // If element separator contains space character it's a special case,
-    // because in that case
-    // it is allowed to have element separator inside a range, e.g. "4 - 5", but
-    // not "4,-5"
-
-    // Space is added so that last empty element of the "1,2,3-" is not ignored
-    // and we can
-    // spot the error. Behaviour is changed in Poco 1.5 and this will not be
-    // needed.
-    Tokenizer ranges(str + " ", rangeSep, Tokenizer::TOK_TRIM);
+    Tokenizer ranges(str, rangeSep, Tokenizer::TOK_TRIM);
     std::string new_str =
         join(ranges.begin(), ranges.end(), rangeSep.substr(0, 1));
-
     elements = boost::make_shared<Tokenizer>(
         new_str, elemSep, Tokenizer::TOK_IGNORE_EMPTY | Tokenizer::TOK_TRIM);
   } else {
@@ -1076,8 +1067,7 @@ std::vector<int> parseRange(const std::string &str, const std::string &elemSep,
   result.reserve(elements->count());
 
   for (auto it = elements->begin(); it != elements->end(); it++) {
-    // See above for the reason space is added
-    Tokenizer rangeElements(*it + " ", rangeSep, Tokenizer::TOK_TRIM);
+    Tokenizer rangeElements(*it, rangeSep, Tokenizer::TOK_TRIM);
 
     size_t noOfRangeElements = rangeElements.count();
 
diff --git a/Framework/MDAlgorithms/src/BoxControllerSettingsAlgorithm.cpp b/Framework/MDAlgorithms/src/BoxControllerSettingsAlgorithm.cpp
index 8fe24a9e938..df5e2bad756 100644
--- a/Framework/MDAlgorithms/src/BoxControllerSettingsAlgorithm.cpp
+++ b/Framework/MDAlgorithms/src/BoxControllerSettingsAlgorithm.cpp
@@ -36,7 +36,7 @@ void BoxControllerSettingsAlgorithm::initBoxControllerProps(
 
   // Split up comma-separated properties
   std::vector<int> value;
-  typedef Poco::StringTokenizer tokenizer;
+  typedef Mantid::Kernel::StringTokenizer tokenizer;
   tokenizer values(SplitInto, ",",
                    tokenizer::TOK_IGNORE_EMPTY | tokenizer::TOK_TRIM);
   value.clear();
diff --git a/Framework/WorkflowAlgorithms/src/HFIRInstrument.cpp b/Framework/WorkflowAlgorithms/src/HFIRInstrument.cpp
index 87910596d46..ff488bb337e 100644
--- a/Framework/WorkflowAlgorithms/src/HFIRInstrument.cpp
+++ b/Framework/WorkflowAlgorithms/src/HFIRInstrument.cpp
@@ -6,7 +6,7 @@
 #include "MantidKernel/Property.h"
 #include "MantidKernel/PropertyWithValue.h"
 #include "MantidDataObjects/Workspace2D.h"
-#include "Poco/StringTokenizer.h"
+#include "MantidKernel/StringTokenizer.h"
 #include "Poco/NumberParser.h"
 
 namespace Mantid {
@@ -97,8 +97,8 @@ double getSourceToSampleDistance(API::MatrixWorkspace_sptr dataWS) {
         "Unable to find [aperture-distances] instrument parameter");
 
   double SSD = 0;
-  Poco::StringTokenizer tok(pars[0], ",",
-                            Poco::StringTokenizer::TOK_IGNORE_EMPTY);
+  Mantid::Kernel::StringTokenizer tok(
+      pars[0], ",", Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
   if (tok.count() > 0 && tok.count() < 10 && nguides >= 0 && nguides < 9) {
     const std::string distance_as_string = tok[8 - nguides];
     if (!Poco::NumberParser::tryParseFloat(distance_as_string, SSD))
diff --git a/MantidQt/CustomInterfaces/src/Muon/MuonAnalysis.cpp b/MantidQt/CustomInterfaces/src/Muon/MuonAnalysis.cpp
index 83ea4deccea..8a997bd9c34 100644
--- a/MantidQt/CustomInterfaces/src/Muon/MuonAnalysis.cpp
+++ b/MantidQt/CustomInterfaces/src/Muon/MuonAnalysis.cpp
@@ -33,7 +33,7 @@
 
 #include <Poco/File.h>
 #include <Poco/Path.h>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 
 #include <boost/lexical_cast.hpp>
 #include <boost/assign.hpp>
diff --git a/MantidQt/CustomInterfaces/src/SANSRunWindow.cpp b/MantidQt/CustomInterfaces/src/SANSRunWindow.cpp
index e61ab45ee96..7a1d0c3a16e 100644
--- a/MantidQt/CustomInterfaces/src/SANSRunWindow.cpp
+++ b/MantidQt/CustomInterfaces/src/SANSRunWindow.cpp
@@ -39,7 +39,7 @@
 #include <QDesktopServices>
 #include <QUrl>
 
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 #include <Poco/Message.h>
 
 #include <boost/lexical_cast.hpp>
@@ -3311,9 +3311,11 @@ void SANSRunWindow::checkList() {
 
   bool valid(false);
   // split up the comma separated list ignoring spaces
-  Poco::StringTokenizer in(input, ",", Poco::StringTokenizer::TOK_TRIM);
+  Mantid::Kernel::StringTokenizer in(input, ",",
+                                     Mantid::Kernel::StringTokenizer::TOK_TRIM);
   try {
-    for (Poco::StringTokenizer::Iterator i = in.begin(), end = in.end();
+    for (Mantid::Kernel::StringTokenizer::Iterator i = in.begin(),
+                                                   end = in.end();
          i != end; ++i) { // try a lexical cast, we don't need its result only
                           // if there was an error
       boost::lexical_cast<double>(*i);
-- 
GitLab