diff --git a/Framework/API/src/Algorithm.cpp b/Framework/API/src/Algorithm.cpp
index 01687e116535e5f1ab4fedb890de1f2919643fd9..3bc5b1f36c4e6a87a60000b4cc6a0c695abf3895 100644
--- a/Framework/API/src/Algorithm.cpp
+++ b/Framework/API/src/Algorithm.cpp
@@ -26,7 +26,7 @@
 #include <Poco/ActiveResult.h>
 #include <Poco/NotificationCenter.h>
 #include <Poco/RWLock.h>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 #include <Poco/Void.h>
 
 #include <json/json.h>
@@ -204,9 +204,10 @@ void Algorithm::progress(double p, const std::string &msg, double estimatedTime,
 //---------------------------------------------------------------------------------------------
 /// Function to return all of the categories that contain this algorithm
 const std::vector<std::string> Algorithm::categories() const {
-  Poco::StringTokenizer tokenizer(category(), categorySeparator(),
-                                  Poco::StringTokenizer::TOK_TRIM |
-                                      Poco::StringTokenizer::TOK_IGNORE_EMPTY);
+  Mantid::Kernel::StringTokenizer tokenizer(
+      category(), categorySeparator(),
+      Mantid::Kernel::StringTokenizer::TOK_TRIM |
+          Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
 
   std::vector<std::string> res(tokenizer.begin(), tokenizer.end());
 
@@ -230,10 +231,10 @@ const std::string Algorithm::workspaceMethodName() const { return ""; }
  *workspaceMethodName attached
  */
 const std::vector<std::string> Algorithm::workspaceMethodOn() const {
-  Poco::StringTokenizer tokenizer(this->workspaceMethodOnTypes(),
-                                  WORKSPACE_TYPES_SEPARATOR,
-                                  Poco::StringTokenizer::TOK_TRIM |
-                                      Poco::StringTokenizer::TOK_IGNORE_EMPTY);
+  Mantid::Kernel::StringTokenizer tokenizer(
+      this->workspaceMethodOnTypes(), WORKSPACE_TYPES_SEPARATOR,
+      Mantid::Kernel::StringTokenizer::TOK_TRIM |
+          Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
   std::vector<std::string> res;
   res.reserve(tokenizer.count());
   for (auto iter = tokenizer.begin(); iter != tokenizer.end(); ++iter) {
diff --git a/Framework/API/src/AlgorithmFactory.cpp b/Framework/API/src/AlgorithmFactory.cpp
index 099150e852c77cf245af3d51032366d44c029bf5..1c931df2d06c093fb24e03022a7ba4ee9167f956 100644
--- a/Framework/API/src/AlgorithmFactory.cpp
+++ b/Framework/API/src/AlgorithmFactory.cpp
@@ -7,7 +7,7 @@
 #include "MantidKernel/LibraryManager.h"
 #include "MantidKernel/ConfigService.h"
 
-#include "Poco/StringTokenizer.h"
+#include "MantidKernel/StringTokenizer.h"
 
 namespace Mantid {
 namespace API {
@@ -391,9 +391,10 @@ void AlgorithmFactoryImpl::fillHiddenCategories(
     std::set<std::string> *categorySet) const {
   std::string categoryString = Kernel::ConfigService::Instance().getString(
       "algorithms.categories.hidden");
-  Poco::StringTokenizer tokenizer(categoryString, ";",
-                                  Poco::StringTokenizer::TOK_TRIM |
-                                      Poco::StringTokenizer::TOK_IGNORE_EMPTY);
+  Mantid::Kernel::StringTokenizer tokenizer(
+      categoryString, ";",
+      Mantid::Kernel::StringTokenizer::TOK_TRIM |
+          Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
   std::copy(tokenizer.begin(), tokenizer.end(),
             std::inserter(*categorySet, categorySet->end()));
 }
diff --git a/Framework/API/src/AlgorithmProxy.cpp b/Framework/API/src/AlgorithmProxy.cpp
index 7ea45e33594c946d84f710bba3bd3b64686663c6..39cf34afb1fe4750060c9c5bfc82e9f90f88d376 100644
--- a/Framework/API/src/AlgorithmProxy.cpp
+++ b/Framework/API/src/AlgorithmProxy.cpp
@@ -309,9 +309,10 @@ std::string AlgorithmProxy::toString() const {
 
 /// Function to return all of the categories that contain this algorithm
 const std::vector<std::string> AlgorithmProxy::categories() const {
-  Poco::StringTokenizer tokenizer(category(), categorySeparator(),
-                                  Poco::StringTokenizer::TOK_TRIM |
-                                      Poco::StringTokenizer::TOK_IGNORE_EMPTY);
+  Mantid::Kernel::StringTokenizer tokenizer(
+      category(), categorySeparator(),
+      Mantid::Kernel::StringTokenizer::TOK_TRIM |
+          Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
 
   std::vector<std::string> res(tokenizer.begin(), tokenizer.end());
 
diff --git a/Framework/API/src/ConstraintFactory.cpp b/Framework/API/src/ConstraintFactory.cpp
index 8500cc000b9a2b6a6be62a968a1ff0c5b9e3dde6..4f42deb76473bd4d68f5d7f4690a03ca3cbd85b3 100644
--- a/Framework/API/src/ConstraintFactory.cpp
+++ b/Framework/API/src/ConstraintFactory.cpp
@@ -2,7 +2,7 @@
 #include "MantidAPI/Expression.h"
 #include "MantidAPI/IConstraint.h"
 #include "MantidKernel/LibraryManager.h"
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 
 namespace Mantid {
 namespace API {
diff --git a/Framework/API/src/ExperimentInfo.cpp b/Framework/API/src/ExperimentInfo.cpp
index 90a35d1f73f23016639ca42ea97a64646c8c3c45..5f510975197722983a3ca2f41b1de6f7aa851ce0 100644
--- a/Framework/API/src/ExperimentInfo.cpp
+++ b/Framework/API/src/ExperimentInfo.cpp
@@ -1134,14 +1134,14 @@ void ExperimentInfo::readParameterMap(const std::string &parameterStr) {
   Geometry::ParameterMap &pmap = this->instrumentParameters();
   Instrument_const_sptr instr = this->getInstrument()->baseInstrument();
 
-  int options = Poco::StringTokenizer::TOK_IGNORE_EMPTY;
-  options += Poco::StringTokenizer::TOK_TRIM;
-  Poco::StringTokenizer splitter(parameterStr, "|", options);
+  int options = Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY;
+  options += Mantid::Kernel::StringTokenizer::TOK_TRIM;
+  Mantid::Kernel::StringTokenizer splitter(parameterStr, "|", options);
 
   auto iend = splitter.end();
   // std::string prev_name;
   for (auto itr = splitter.begin(); itr != iend; ++itr) {
-    Poco::StringTokenizer tokens(*itr, ";");
+    Mantid::Kernel::StringTokenizer tokens(*itr, ";");
     if (tokens.count() < 4)
       continue;
     std::string comp_name = tokens[0];
diff --git a/Framework/API/src/Expression.cpp b/Framework/API/src/Expression.cpp
index a3d59245d9438eaa20be89de492fa8b7a42a8f01..6402b9a3c9081161d5265c4624c12c2bf8cd8c5e 100644
--- a/Framework/API/src/Expression.cpp
+++ b/Framework/API/src/Expression.cpp
@@ -4,12 +4,12 @@
 
 #include "MantidAPI/Expression.h"
 
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 
 namespace Mantid {
 namespace API {
 
-typedef Poco::StringTokenizer tokenizer;
+typedef Mantid::Kernel::StringTokenizer tokenizer;
 
 const std::string DEFAULT_OPS_STR[] = {";", ",", "=", "== != > < <= >=",
                                        "&& || ^^", "+ -", "* /", "^"};
diff --git a/Framework/API/src/FileFinder.cpp b/Framework/API/src/FileFinder.cpp
index 2c77a6ef042436bbb6d17dd19368a1e899af190a..4df913f499cece50d4b81aeff176af5d201e3324 100644
--- a/Framework/API/src/FileFinder.cpp
+++ b/Framework/API/src/FileFinder.cpp
@@ -14,7 +14,7 @@
 
 #include <Poco/Path.h>
 #include <Poco/File.h>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 #include <Poco/Exception.h>
 #include <boost/regex.hpp>
 #include <boost/lexical_cast.hpp>
@@ -597,9 +597,9 @@ FileFinderImpl::findRuns(const std::string &hintstr) const {
   std::string hint = Kernel::Strings::strip(hintstr);
   g_log.debug() << "findRuns hint = " << hint << "\n";
   std::vector<std::string> res;
-  Poco::StringTokenizer hints(hint, ",",
-                              Poco::StringTokenizer::TOK_TRIM |
-                                  Poco::StringTokenizer::TOK_IGNORE_EMPTY);
+  Mantid::Kernel::StringTokenizer hints(
+      hint, ",", Mantid::Kernel::StringTokenizer::TOK_TRIM |
+                     Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
   auto h = hints.begin();
 
   for (; h != hints.end(); ++h) {
@@ -616,9 +616,9 @@ FileFinderImpl::findRuns(const std::string &hintstr) const {
       fileSuspected = true;
     }
 
-    Poco::StringTokenizer range(*h, "-",
-                                Poco::StringTokenizer::TOK_TRIM |
-                                    Poco::StringTokenizer::TOK_IGNORE_EMPTY);
+    Mantid::Kernel::StringTokenizer range(
+        *h, "-", Mantid::Kernel::StringTokenizer::TOK_TRIM |
+                     Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
     if ((range.count() > 2) && (!fileSuspected)) {
       throw std::invalid_argument("Malformed range of runs: " + *h);
     } else if ((range.count() == 2) && (!fileSuspected)) {
diff --git a/Framework/API/src/FunctionFactory.cpp b/Framework/API/src/FunctionFactory.cpp
index c44e9ec25be3110161d9d5e623afcf90208ddd76..4a7c7ba8361900c7cf2b44f41197edf7b4811956 100644
--- a/Framework/API/src/FunctionFactory.cpp
+++ b/Framework/API/src/FunctionFactory.cpp
@@ -8,7 +8,7 @@
 #include "MantidAPI/Workspace.h"
 #include "MantidAPI/AnalysisDataService.h"
 #include "MantidKernel/LibraryManager.h"
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 #include <sstream>
 
 namespace Mantid {
diff --git a/Framework/API/src/IFunction.cpp b/Framework/API/src/IFunction.cpp
index e4ae61a01763abcc9fcb5557d9d2144b6bd5e28e..cbbad9370db53bae57292a3727873acc33cc24cb 100644
--- a/Framework/API/src/IFunction.cpp
+++ b/Framework/API/src/IFunction.cpp
@@ -24,7 +24,7 @@
 
 #include <boost/lexical_cast.hpp>
 
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 
 #include <limits>
 #include <sstream>
@@ -253,9 +253,10 @@ void IFunction::setHandler(FunctionHandler *handler) {
 
 /// Function to return all of the categories that contain this function
 const std::vector<std::string> IFunction::categories() const {
-  Poco::StringTokenizer tokenizer(category(), categorySeparator(),
-                                  Poco::StringTokenizer::TOK_TRIM |
-                                      Poco::StringTokenizer::TOK_IGNORE_EMPTY);
+  Mantid::Kernel::StringTokenizer tokenizer(
+      category(), categorySeparator(),
+      Mantid::Kernel::StringTokenizer::TOK_TRIM |
+          Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
   return std::vector<std::string>(tokenizer.begin(), tokenizer.end());
 }
 
@@ -572,8 +573,8 @@ protected:
         m_value.erase(m_value.size() - 1);
       }
     }
-    Poco::StringTokenizer tokenizer(m_value, ",",
-                                    Poco::StringTokenizer::TOK_TRIM);
+    Mantid::Kernel::StringTokenizer tokenizer(
+        m_value, ",", Mantid::Kernel::StringTokenizer::TOK_TRIM);
     v.resize(tokenizer.count());
     for (size_t i = 0; i < v.size(); ++i) {
       v[i] = boost::lexical_cast<double>(tokenizer[i]);
diff --git a/Framework/API/src/ScriptRepositoryFactory.cpp b/Framework/API/src/ScriptRepositoryFactory.cpp
index 6fce2310fa7be303c8eb316b8438049992bc3280..c9e4a6a7890271893690edf3ec1781ee6b24b46a 100644
--- a/Framework/API/src/ScriptRepositoryFactory.cpp
+++ b/Framework/API/src/ScriptRepositoryFactory.cpp
@@ -1,7 +1,7 @@
 #include "MantidAPI/ScriptRepositoryFactory.h"
 #include "MantidAPI/ScriptRepository.h"
 #include "MantidKernel/LibraryManager.h"
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 #include <sstream>
 
 namespace Mantid {
diff --git a/Framework/API/src/WorkspaceHistory.cpp b/Framework/API/src/WorkspaceHistory.cpp
index 6317194090d433ffa106e514dd6edb83c9641739..9499348becf85b6c3dd09bb0ecd13cf1aa8e215d 100644
--- a/Framework/API/src/WorkspaceHistory.cpp
+++ b/Framework/API/src/WorkspaceHistory.cpp
@@ -201,7 +201,8 @@ void WorkspaceHistory::saveNexus(::NeXus::File *file) const {
  */
 void getWordsInString(const std::string &words3, std::string &w1,
                       std::string &w2, std::string &w3) {
-  Poco::StringTokenizer data(words3, " ", Poco::StringTokenizer::TOK_TRIM);
+  Mantid::Kernel::StringTokenizer data(
+      words3, " ", Mantid::Kernel::StringTokenizer::TOK_TRIM);
   if (data.count() != 3)
     throw std::out_of_range("Algorithm list line " + words3 +
                             " is not of the correct format\n");
@@ -224,7 +225,8 @@ void getWordsInString(const std::string &words3, std::string &w1,
  */
 void getWordsInString(const std::string &words4, std::string &w1,
                       std::string &w2, std::string &w3, std::string &w4) {
-  Poco::StringTokenizer data(words4, " ", Poco::StringTokenizer::TOK_TRIM);
+  Mantid::Kernel::StringTokenizer data(
+      words4, " ", Mantid::Kernel::StringTokenizer::TOK_TRIM);
   if (data.count() != 4)
     throw std::out_of_range("Algorithm list line " + words4 +
                             " is not of the correct format\n");
diff --git a/Framework/Algorithms/src/ReadGroupsFromFile.cpp b/Framework/Algorithms/src/ReadGroupsFromFile.cpp
index 7d577056aa5709f0d32f561ea45235a9dd4d29ce..ea16edbe7fa8811a799fdc82f9003cb332e88bd2 100644
--- a/Framework/Algorithms/src/ReadGroupsFromFile.cpp
+++ b/Framework/Algorithms/src/ReadGroupsFromFile.cpp
@@ -222,7 +222,8 @@ void ReadGroupsFromFile::readXMLGroupingFile(const std::string &filename) {
 
     std::string ids = group->getAttribute("val");
 
-    Poco::StringTokenizer data(ids, ",", Poco::StringTokenizer::TOK_TRIM);
+    Mantid::Kernel::StringTokenizer data(
+        ids, ",", Mantid::Kernel::StringTokenizer::TOK_TRIM);
 
     if (data.begin() != data.end()) {
       for (auto it = data.begin(); it != data.end(); ++it) {
diff --git a/Framework/CurveFitting/src/Algorithms/PlotPeakByLogValue.cpp b/Framework/CurveFitting/src/Algorithms/PlotPeakByLogValue.cpp
index 7ef71507ce4a7ebd41d4e37c0beeeb248b9080b9..1bd9858b189f121fbf14ad1ad86393e0b9ddd6e1 100644
--- a/Framework/CurveFitting/src/Algorithms/PlotPeakByLogValue.cpp
+++ b/Framework/CurveFitting/src/Algorithms/PlotPeakByLogValue.cpp
@@ -6,7 +6,7 @@
 #include <fstream>
 #include <sstream>
 #include <algorithm>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 #include <boost/lexical_cast.hpp>
 #include <boost/algorithm/string/replace.hpp>
 
@@ -518,7 +518,7 @@ PlotPeakByLogValue::makeNames() const {
   double start = 0;
   double end = 0;
 
-  typedef Poco::StringTokenizer tokenizer;
+  typedef Mantid::Kernel::StringTokenizer tokenizer;
   tokenizer names(inputList, ";",
                   tokenizer::TOK_IGNORE_EMPTY | tokenizer::TOK_TRIM);
   for (auto it = names.begin(); it != names.end(); ++it) {
diff --git a/Framework/DataHandling/inc/MantidDataHandling/GroupDetectors2.h b/Framework/DataHandling/inc/MantidDataHandling/GroupDetectors2.h
index 31d655085404dd2d100e58e9425f4bd00f09a558..2325d4ebf6a9ddcf115da0f3a985aee53887b2ba 100644
--- a/Framework/DataHandling/inc/MantidDataHandling/GroupDetectors2.h
+++ b/Framework/DataHandling/inc/MantidDataHandling/GroupDetectors2.h
@@ -144,8 +144,10 @@ private:
     RangeHelper(){};
     /// give an enum from poco a better name here
     enum {
-      IGNORE_SPACES = Poco::StringTokenizer::TOK_TRIM ///< equal to
-      /// Poco::StringTokenizer::TOK_TRIM but
+      IGNORE_SPACES =
+          Mantid::Kernel::StringTokenizer::TOK_TRIM |
+          Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY ///< shorthand for
+      /// TOK_TRIM | TOK_IGNORE_EMPTY; using this enum
       /// saves some typing
     };
   };
@@ -239,8 +241,8 @@ private:
     /// spectrum number to the this
     EMPTY_LINE = 1001 - INT_MAX, ///< when reading from the input file this
     /// value means that we found any empty line
-    IGNORE_SPACES = Poco::StringTokenizer::TOK_TRIM ///< equal to
-    /// Poco::StringTokenizer::TOK_TRIM but
+    IGNORE_SPACES = Mantid::Kernel::StringTokenizer::TOK_TRIM ///< shorthand
+    /// for Mantid::Kernel::StringTokenizer::TOK_TRIM; using this enum
     /// saves some typing
   };
 
diff --git a/Framework/DataHandling/src/CheckMantidVersion.cpp b/Framework/DataHandling/src/CheckMantidVersion.cpp
index c64067efecffda8d31339cdcc36af2b7b1c24045..75edac7539474b576e19d3fe6fddb98c29fa4ec0 100644
--- a/Framework/DataHandling/src/CheckMantidVersion.cpp
+++ b/Framework/DataHandling/src/CheckMantidVersion.cpp
@@ -6,7 +6,7 @@
 #include <Poco/DateTimeFormatter.h>
 #include <Poco/DateTimeFormat.h>
 #include <Poco/DateTimeParser.h>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 
 // jsoncpp
 #include <json/json.h>
@@ -175,9 +175,10 @@ CheckMantidVersion::cleanVersionTag(const std::string &versionTag) const {
 std::vector<int>
 CheckMantidVersion::splitVersionString(const std::string &versionString) const {
   std::vector<int> retVal;
-  Poco::StringTokenizer tokenizer(versionString, ".",
-                                  Poco::StringTokenizer::TOK_TRIM |
-                                      Poco::StringTokenizer::TOK_IGNORE_EMPTY);
+  Mantid::Kernel::StringTokenizer tokenizer(
+      versionString, ".",
+      Mantid::Kernel::StringTokenizer::TOK_TRIM |
+          Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
   auto h = tokenizer.begin();
 
   for (; h != tokenizer.end(); ++h) {
diff --git a/Framework/DataHandling/src/GroupDetectors2.cpp b/Framework/DataHandling/src/GroupDetectors2.cpp
index 5fb850d4c53f19a9188cc9364fc39fdcb88c2c68..7b469e40b3e094b1aec5b9ee01ce4c12555473b6 100644
--- a/Framework/DataHandling/src/GroupDetectors2.cpp
+++ b/Framework/DataHandling/src/GroupDetectors2.cpp
@@ -732,10 +732,13 @@ void GroupDetectors2::processMatrixWorkspace(
 */
 int GroupDetectors2::readInt(std::string line) {
   // remove comments and white space (TOK_TRIM)
-  Poco::StringTokenizer dataComment(line, "#", Poco::StringTokenizer::TOK_TRIM);
+  Mantid::Kernel::StringTokenizer dataComment(
+      line, "#", Mantid::Kernel::StringTokenizer::TOK_TRIM);
   if (dataComment.begin() != dataComment.end()) {
-    Poco::StringTokenizer data(*(dataComment.begin()), " ",
-                               Poco::StringTokenizer::TOK_TRIM);
+    Mantid::Kernel::StringTokenizer data(
+        *(dataComment.begin()), " ",
+        Mantid::Kernel::StringTokenizer::TOK_TRIM |
+            Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
     if (data.count() == 1) {
       if (!data[0].empty()) {
         try {
@@ -855,7 +858,7 @@ void GroupDetectors2::readSpectraIndexes(std::string line,
                                          std::vector<int64_t> &unUsedSpec,
                                          std::string seperator) {
   // remove comments and white space
-  Poco::StringTokenizer dataComment(line, seperator, IGNORE_SPACES);
+  Mantid::Kernel::StringTokenizer dataComment(line, seperator, IGNORE_SPACES);
   for (auto itr = dataComment.begin(); itr != dataComment.end(); ++itr) {
     std::vector<size_t> specNums;
     specNums.reserve(output.capacity());
@@ -1252,12 +1255,13 @@ void GroupDetectors2::RangeHelper::getList(const std::string &line,
                       // function
     return;
   }
-  Poco::StringTokenizer ranges(line, "-");
+  Mantid::Kernel::StringTokenizer ranges(line, "-");
 
   try {
     size_t loop = 0;
     do {
-      Poco::StringTokenizer beforeHyphen(ranges[loop], " ", IGNORE_SPACES);
+      Mantid::Kernel::StringTokenizer beforeHyphen(ranges[loop], " ",
+                                                   IGNORE_SPACES);
       auto readPostion = beforeHyphen.begin();
       if (readPostion == beforeHyphen.end()) {
         throw std::invalid_argument("'-' found at the start of a list, can't "
@@ -1273,7 +1277,8 @@ void GroupDetectors2::RangeHelper::getList(const std::string &line,
         break;
       }
 
-      Poco::StringTokenizer afterHyphen(ranges[loop + 1], " ", IGNORE_SPACES);
+      Mantid::Kernel::StringTokenizer afterHyphen(ranges[loop + 1], " ",
+                                                  IGNORE_SPACES);
       readPostion = afterHyphen.begin();
       if (readPostion == afterHyphen.end()) {
         throw std::invalid_argument("A '-' follows straight after another '-', "
diff --git a/Framework/DataHandling/src/ISISDataArchive.cpp b/Framework/DataHandling/src/ISISDataArchive.cpp
index 7bea0f139c19b804241589b8e5b5a9933ddcaceb..3bbacf6e1c3cd5155d6d56d3603349171c35ee18 100644
--- a/Framework/DataHandling/src/ISISDataArchive.cpp
+++ b/Framework/DataHandling/src/ISISDataArchive.cpp
@@ -8,7 +8,7 @@
 
 #include <Poco/Path.h>
 #include <Poco/File.h>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 #include <Poco/Exception.h>
 
 #include <sstream>
diff --git a/Framework/DataHandling/src/LoadAscii.cpp b/Framework/DataHandling/src/LoadAscii.cpp
index ee39464e0c9157d562dee20310c21efeb745c2e8..d2a9fa9fd076cbdfb1b23cd9162a2099eb32d0c8 100644
--- a/Framework/DataHandling/src/LoadAscii.cpp
+++ b/Framework/DataHandling/src/LoadAscii.cpp
@@ -12,7 +12,7 @@
 #include <fstream>
 
 #include <boost/tokenizer.hpp>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 // String utilities
 #include <boost/algorithm/string.hpp>
 
diff --git a/Framework/DataHandling/src/LoadAscii2.cpp b/Framework/DataHandling/src/LoadAscii2.cpp
index 284ed879bc66f58b10ec66e8b574229c535161f0..ea97c6347e58f1cfed28c99e9707cf9727500bae 100644
--- a/Framework/DataHandling/src/LoadAscii2.cpp
+++ b/Framework/DataHandling/src/LoadAscii2.cpp
@@ -12,7 +12,7 @@
 #include <fstream>
 
 #include <boost/tokenizer.hpp>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 // String utilities
 #include <boost/algorithm/string.hpp>
 #include <boost/regex.hpp>
diff --git a/Framework/DataHandling/src/LoadNexusProcessed.cpp b/Framework/DataHandling/src/LoadNexusProcessed.cpp
index 556d63c54c7c070d251760ac9e28db09112c5d1d..d3e0af2882b588f1d55aaf6912b929e141bea810 100644
--- a/Framework/DataHandling/src/LoadNexusProcessed.cpp
+++ b/Framework/DataHandling/src/LoadNexusProcessed.cpp
@@ -29,7 +29,7 @@
 #include <boost/lexical_cast.hpp>
 #include <boost/shared_array.hpp>
 
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 
 #include <nexus/NeXusException.hpp>
 
@@ -1762,7 +1762,8 @@ bool UDlesserExecCount(NXClassInfo elem1, NXClassInfo elem2) {
 void LoadNexusProcessed::getWordsInString(const std::string &words3,
                                           std::string &w1, std::string &w2,
                                           std::string &w3) {
-  Poco::StringTokenizer data(words3, " ", Poco::StringTokenizer::TOK_TRIM);
+  Mantid::Kernel::StringTokenizer data(
+      words3, " ", Mantid::Kernel::StringTokenizer::TOK_TRIM);
   if (data.count() != 3) {
     g_log.warning() << "Algorithm list line " + words3 +
                            " is not of the correct format\n";
@@ -1788,7 +1789,8 @@ void LoadNexusProcessed::getWordsInString(const std::string &words3,
 void LoadNexusProcessed::getWordsInString(const std::string &words4,
                                           std::string &w1, std::string &w2,
                                           std::string &w3, std::string &w4) {
-  Poco::StringTokenizer data(words4, " ", Poco::StringTokenizer::TOK_TRIM);
+  Mantid::Kernel::StringTokenizer data(
+      words4, " ", Mantid::Kernel::StringTokenizer::TOK_TRIM);
   if (data.count() != 4) {
     g_log.warning() << "Algorithm list line " + words4 +
                            " is not of the correct format\n";
diff --git a/Framework/DataHandling/src/LoadRKH.cpp b/Framework/DataHandling/src/LoadRKH.cpp
index d737c518fdb8159da986971a604294e2851b21d6..8ee92fcf67db69a357191630363b763b550d6ab6 100644
--- a/Framework/DataHandling/src/LoadRKH.cpp
+++ b/Framework/DataHandling/src/LoadRKH.cpp
@@ -15,7 +15,7 @@
 #include <boost/date_time/date_parsing.hpp>
 #include <boost/lexical_cast.hpp>
 #include <boost/algorithm/string.hpp>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 
 #include <istream>
 
@@ -411,8 +411,9 @@ Progress LoadRKH::read2DHeader(const std::string &initalLine,
   if (fileLine.size() < 5) {
     std::getline(m_fileIn, fileLine);
   }
-  Poco::StringTokenizer wsDimensions(fileLine, " ",
-                                     Poco::StringTokenizer::TOK_TRIM);
+  Mantid::Kernel::StringTokenizer wsDimensions(
+      fileLine, " ", Mantid::Kernel::StringTokenizer::TOK_TRIM |
+                         Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
   if (wsDimensions.count() < 2) {
     throw Exception::NotFoundError("Input file", "dimensions");
   }
@@ -460,7 +461,9 @@ void LoadRKH::readNumEntrys(const int nEntries, MantidVec &output) {
 */
 const std::string LoadRKH::readUnit(const std::string &line) {
   // split the line into words
-  const Poco::StringTokenizer codes(line, " ", Poco::StringTokenizer::TOK_TRIM);
+  const Mantid::Kernel::StringTokenizer codes(
+      line, " ", Mantid::Kernel::StringTokenizer::TOK_TRIM |
+                     Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
   if (codes.count() < 1) {
     return "C++ no unit found";
   }
diff --git a/Framework/DataHandling/src/LoadReflTBL.cpp b/Framework/DataHandling/src/LoadReflTBL.cpp
index 260e726dfd89b57a820363b9170e2d822d3f41f9..c776e0ed0b9772f3afca5213132e1948767a0197 100644
--- a/Framework/DataHandling/src/LoadReflTBL.cpp
+++ b/Framework/DataHandling/src/LoadReflTBL.cpp
@@ -9,7 +9,7 @@
 #include <fstream>
 
 #include <boost/tokenizer.hpp>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 // String utilities
 #include <boost/algorithm/string.hpp>
 
diff --git a/Framework/DataHandling/src/LoadSpice2D.cpp b/Framework/DataHandling/src/LoadSpice2D.cpp
index 4312bfd53be4b482ea97fedfd0c5bfc156e0ed5e..3d17fe969518149e36f09b0bc03f8b2ffcf40882 100644
--- a/Framework/DataHandling/src/LoadSpice2D.cpp
+++ b/Framework/DataHandling/src/LoadSpice2D.cpp
@@ -15,7 +15,7 @@
 #include <boost/regex.hpp>
 #include <boost/shared_array.hpp>
 #include <Poco/Path.h>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 #include <Poco/DOM/DOMParser.h>
 #include <Poco/DOM/Document.h>
 #include <Poco/DOM/Element.h>
diff --git a/Framework/DataHandling/src/UpdateInstrumentFromFile.cpp b/Framework/DataHandling/src/UpdateInstrumentFromFile.cpp
index 04f7103e3cbb4f2464c4481d6ee490a212fffa50..82ea9cd7f3f97cc17e45da06d6334d89c49fa619 100644
--- a/Framework/DataHandling/src/UpdateInstrumentFromFile.cpp
+++ b/Framework/DataHandling/src/UpdateInstrumentFromFile.cpp
@@ -17,7 +17,7 @@
 #include <boost/scoped_ptr.hpp>
 #include <boost/algorithm/string/predicate.hpp>
 #include <nexus/NeXusException.hpp>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 
 #include <fstream>
 
@@ -311,7 +311,8 @@ bool UpdateInstrumentFromFile::parseAsciiHeader(
                                 "property is empty, cannot interpret columns");
   }
 
-  Poco::StringTokenizer splitter(header, ",", Poco::StringTokenizer::TOK_TRIM);
+  Mantid::Kernel::StringTokenizer splitter(
+      header, ",", Mantid::Kernel::StringTokenizer::TOK_TRIM);
   headerInfo.colCount = splitter.count();
   auto it =
       splitter.begin(); // First column must be spectrum number or detector ID
diff --git a/Framework/DataHandling/test/SaveParameterFileTest.h b/Framework/DataHandling/test/SaveParameterFileTest.h
index 29cd23889e57d4088a018c1abc7de8531db2fc71..6ce13bf6c081ad934ac9aed300182df64b8719b7 100644
--- a/Framework/DataHandling/test/SaveParameterFileTest.h
+++ b/Framework/DataHandling/test/SaveParameterFileTest.h
@@ -136,7 +136,7 @@ public:
         param->value<FitParameter>();
 
     // Info about fitting parameter is in string value, see FitParameter class
-    typedef Poco::StringTokenizer tokenizer;
+    typedef Mantid::Kernel::StringTokenizer tokenizer;
     tokenizer values(value, ",", tokenizer::TOK_TRIM);
     TS_ASSERT_EQUALS(fitParam.getFormula(), values[7]);
     TS_ASSERT_EQUALS(fitParam.getFunction(), values[1]);
diff --git a/Framework/Geometry/inc/MantidGeometry/Instrument/Parameter.h b/Framework/Geometry/inc/MantidGeometry/Instrument/Parameter.h
index 07507db27149091f2bd2c808f04481512ae8c077..248070ef7be52e1555936343aaafab032dfbfd56 100644
--- a/Framework/Geometry/inc/MantidGeometry/Instrument/Parameter.h
+++ b/Framework/Geometry/inc/MantidGeometry/Instrument/Parameter.h
@@ -212,7 +212,9 @@ template <class Type> std::string ParameterType<Type>::asString() const {
  */
 template <class Type>
 void ParameterType<Type>::fromString(const std::string &value) {
-  std::istringstream istr(value);
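+  // Drop the last character of the value (assumed non-empty) before parsing.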
+  std::string newvalue(value.begin(), value.end() - 1);
+  std::istringstream istr(newvalue);
   istr >> m_value;
 }
 
diff --git a/Framework/Geometry/src/Instrument/FitParameter.cpp b/Framework/Geometry/src/Instrument/FitParameter.cpp
index ff04884990cb60e3064b5438e9ea1419cacaf33b..a8f5996da8b047ea19b3335421d1340166aec726 100644
--- a/Framework/Geometry/src/Instrument/FitParameter.cpp
+++ b/Framework/Geometry/src/Instrument/FitParameter.cpp
@@ -4,7 +4,7 @@
 #include "MantidGeometry/Instrument/FitParameter.h"
 #include "MantidGeometry/Instrument/Parameter.h"
 #include "MantidGeometry/Instrument/ParameterFactory.h"
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 #include "MantidGeometry/muParser_Silent.h"
 
 namespace Mantid {
@@ -162,7 +162,7 @@ std::ostream &operator<<(std::ostream &os, const FitParameter &f) {
 */
 std::istream &operator>>(std::istream &in, FitParameter &f) {
 
-  typedef Poco::StringTokenizer tokenizer;
+  typedef Mantid::Kernel::StringTokenizer tokenizer;
   std::string str;
   getline(in, str);
   tokenizer values(str, ",", tokenizer::TOK_TRIM);
diff --git a/Framework/Kernel/CMakeLists.txt b/Framework/Kernel/CMakeLists.txt
index 994539535d5d346a174c5c029f938f7105c2733f..9931bd15a68e3c4d3314c853cf7ff5218c066562 100644
--- a/Framework/Kernel/CMakeLists.txt
+++ b/Framework/Kernel/CMakeLists.txt
@@ -244,6 +244,7 @@ set ( INC_FILES
 	inc/MantidKernel/StdoutChannel.h
 	inc/MantidKernel/StringContainsValidator.h
 	inc/MantidKernel/Strings.h
+	inc/MantidKernel/StringTokenizer.h
 	inc/MantidKernel/System.h
 	inc/MantidKernel/Task.h
 	inc/MantidKernel/TestChannel.h
diff --git a/Framework/Kernel/inc/MantidKernel/PropertyWithValue.h b/Framework/Kernel/inc/MantidKernel/PropertyWithValue.h
index cef85abbfab800bc889c85dfdc0bf9e5d0af86bb..c0a3af55391e16ef509c2c33cf1ed0505adb38d9 100644
--- a/Framework/Kernel/inc/MantidKernel/PropertyWithValue.h
+++ b/Framework/Kernel/inc/MantidKernel/PropertyWithValue.h
@@ -15,7 +15,7 @@
 #include <boost/shared_ptr.hpp>
 #endif
 
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 #include <vector>
 #include "MantidKernel/IPropertySettings.h"
 
@@ -149,7 +149,7 @@ void toValue(const std::string &, boost::shared_ptr<T> &) {
 template <typename T>
 void toValue(const std::string &strvalue, std::vector<T> &value) {
   // Split up comma-separated properties
-  typedef Poco::StringTokenizer tokenizer;
+  typedef Mantid::Kernel::StringTokenizer tokenizer;
   tokenizer values(strvalue, ",",
                    tokenizer::TOK_IGNORE_EMPTY | tokenizer::TOK_TRIM);
 
@@ -165,7 +165,7 @@ template <typename T>
 void toValue(const std::string &strvalue, std::vector<std::vector<T>> &value,
              const std::string &outerDelimiter = ",",
              const std::string &innerDelimiter = "+") {
-  typedef Poco::StringTokenizer tokenizer;
+  typedef Mantid::Kernel::StringTokenizer tokenizer;
   tokenizer tokens(strvalue, outerDelimiter,
                    tokenizer::TOK_IGNORE_EMPTY | tokenizer::TOK_TRIM);
 
@@ -199,7 +199,7 @@ template <typename T> T extractToValueVector(const std::string &strvalue) {
   template <>                                                                  \
   inline void toValue<type>(const std::string &strvalue,                       \
                             std::vector<type> &value) {                        \
-    typedef Poco::StringTokenizer tokenizer;                                   \
+    typedef Mantid::Kernel::StringTokenizer tokenizer;                         \
     tokenizer values(strvalue, ",",                                            \
                      tokenizer::TOK_IGNORE_EMPTY | tokenizer::TOK_TRIM);       \
     value.clear();                                                             \
@@ -213,9 +213,21 @@ PROPERTYWITHVALUE_TOVALUE(int)
 PROPERTYWITHVALUE_TOVALUE(long)
 PROPERTYWITHVALUE_TOVALUE(uint32_t)
 PROPERTYWITHVALUE_TOVALUE(uint64_t)
-#if defined(__APPLE__)
-PROPERTYWITHVALUE_TOVALUE(unsigned long);
-#endif
+
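+// Explicit specialization for unsigned long; it mirrors the
+// PROPERTYWITHVALUE_TOVALUE macro above.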
+template <>
+inline void toValue<unsigned long>(const std::string &strvalue,
+                                   std::vector<unsigned long> &value) {
+  typedef Mantid::Kernel::StringTokenizer tokenizer;
+  tokenizer values(strvalue, ",",
+                   tokenizer::TOK_IGNORE_EMPTY | tokenizer::TOK_TRIM);
+  value.clear();
+  value.reserve(values.count());
+  for (tokenizer::Iterator it = values.begin(); it != values.end(); ++it) {
+    appendValue(*it, value);
+  }
+}
 
 // Clear up the namespace
 #undef PROPERTYWITHVALUE_TOVALUE
diff --git a/Framework/Kernel/inc/MantidKernel/StringTokenizer.h b/Framework/Kernel/inc/MantidKernel/StringTokenizer.h
new file mode 100644
index 0000000000000000000000000000000000000000..ac3b574da1a4309561b271a4d39254d7c1b019ac
--- /dev/null
+++ b/Framework/Kernel/inc/MantidKernel/StringTokenizer.h
@@ -0,0 +1,95 @@
+//
+//  StringTokenizer.h
+//  Mantid
+//
+//  Created by Hahn, Steven E. on 1/29/16.
+//
+//
+
+#ifndef StringTokenizer_h
+#define StringTokenizer_h
+
+#include <Poco/StringTokenizer.h>
+#include "MantidKernel/make_unique.h"
+
+namespace Mantid {
+namespace Kernel {
+
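+// Example usage (an illustrative sketch of the wrapper defined below):
+//   Mantid::Kernel::StringTokenizer tok(
+//       "a, b, c", ",", Mantid::Kernel::StringTokenizer::TOK_TRIM |
+//                           Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
+//   // tok.count() == 3, tok[1] == "b"
+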
+/// A simple tokenizer that splits a string into tokens, which are separated
+/// by separator characters. An iterator is used to iterate over all tokens.
+class StringTokenizer {
+public:
+  enum Options {
+    TOK_IGNORE_EMPTY = 1, ///< ignore empty tokens
+    TOK_TRIM = 2 ///< remove leading and trailing whitespace from tokens
+  };
+
+  typedef Poco::StringTokenizer::TokenVec TokenVec;
+  typedef Poco::StringTokenizer::Iterator Iterator;
+
+  /// Splits the given string into tokens. The tokens are expected to be
+  /// separated by one of the separator characters given in separators.
+  /// Additionally, options can be specified:
+  ///   * TOK_IGNORE_EMPTY: empty tokens are ignored
+  ///   * TOK_TRIM: trailing and leading whitespace is removed from tokens.
+  StringTokenizer(const std::string &str, const std::string &separators,
+                  int options = 0) {
+    m_tokenizer = Mantid::Kernel::make_unique<Poco::StringTokenizer>(
+        str, separators, options);
+  }
+
+  /// Destroys the tokenizer.
+  ~StringTokenizer() = default;
+
+  /// Returns an iterator to the first token.
+  Iterator begin() const { return m_tokenizer->begin(); }
+  /// Returns an iterator past the last token.
+  Iterator end() const { return m_tokenizer->end(); }
+
+  /// Returns a const reference to the index'th token.
+  /// Throws a RangeException if the index is out of range.
+  const std::string &operator[](std::size_t index) const {
+    return (*m_tokenizer)[index];
+  }
+
+  /// Returns a reference to the index'th token.
+  /// Throws a RangeException if the index is out of range.
+  std::string &operator[](std::size_t index) { return (*m_tokenizer)[index]; }
+
+  /// Returns true if the given token exists, false otherwise.
+  bool has(const std::string &token) const { return m_tokenizer->has(token); }
+
+  /// Returns the index of the first occurrence of the token, starting the
+  /// search at position pos. Throws a NotFoundException if it is not found.
+  std::size_t find(const std::string &token, std::size_t pos = 0) const {
+    return m_tokenizer->find(token, pos);
+  }
+
+  /// Starting at position pos, replaces all subsequent tokens having value
+  /// equal to oldToken with newToken. Returns the number of modified tokens.
+  std::size_t replace(const std::string &oldToken, const std::string &newToken,
+                      std::size_t pos = 0) {
+    return m_tokenizer->replace(oldToken, newToken, pos);
+  }
+
+  /// Returns the total number of tokens.
+  std::size_t count() const { return m_tokenizer->count(); }
+
+  /// Returns the number of tokens equal to the specified token.
+  std::size_t count(const std::string &token) const {
+    return m_tokenizer->count(token);
+  }
+
+private:
+  std::unique_ptr<Poco::StringTokenizer> m_tokenizer;
+};
+}
+}
+
+#endif /* StringTokenizer_h */
diff --git a/Framework/Kernel/src/ConfigService.cpp b/Framework/Kernel/src/ConfigService.cpp
index 71f6ee254fa1ad5f5c77ea71a6be99d52856932e..449132fa632e99e6737cbf3c7f3df4f86fc40267 100644
--- a/Framework/Kernel/src/ConfigService.cpp
+++ b/Framework/Kernel/src/ConfigService.cpp
@@ -19,7 +19,7 @@
 #include <Poco/LoggingFactory.h>
 #include <Poco/Path.h>
 #include <Poco/File.h>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 #include <Poco/DOM/DOMParser.h>
 #include <Poco/DOM/Document.h>
 #include <Poco/DOM/NodeList.h>
@@ -78,11 +78,11 @@ void splitPath(const std::string &path, std::vector<std::string> &splitted) {
     return;
   }
 
-  int options =
-      Poco::StringTokenizer::TOK_TRIM + Poco::StringTokenizer::TOK_IGNORE_EMPTY;
+  int options = Mantid::Kernel::StringTokenizer::TOK_TRIM +
+                Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY;
 
   splitted.clear();
-  Poco::StringTokenizer tokenizer(path, ";,", options);
+  Mantid::Kernel::StringTokenizer tokenizer(path, ";,", options);
   auto iend = tokenizer.end();
   splitted.reserve(tokenizer.count());
   for (auto itr = tokenizer.begin(); itr != iend; ++itr) {
diff --git a/Framework/Kernel/src/FacilityInfo.cpp b/Framework/Kernel/src/FacilityInfo.cpp
index e652d9066472e35f86f294c3eb89c8a808aad061..8740399e1c4596d4aa9dd876236105e609f63b74 100644
--- a/Framework/Kernel/src/FacilityInfo.cpp
+++ b/Framework/Kernel/src/FacilityInfo.cpp
@@ -13,7 +13,7 @@
 
 #include <Poco/DOM/Element.h>
 #include <Poco/DOM/NodeList.h>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 
 using Poco::XML::Element;
 
@@ -71,7 +71,7 @@ void FacilityInfo::fillExtensions(const Poco::XML::Element *elem) {
     g_log.error("No file extensions defined");
     throw std::runtime_error("No file extensions defined");
   }
-  typedef Poco::StringTokenizer tokenizer;
+  typedef Mantid::Kernel::StringTokenizer tokenizer;
   tokenizer exts(extsStr, ",",
                  tokenizer::TOK_IGNORE_EMPTY | tokenizer::TOK_TRIM);
   for (auto it = exts.begin(); it != exts.end(); ++it) {
diff --git a/Framework/Kernel/src/FilterChannel.cpp b/Framework/Kernel/src/FilterChannel.cpp
index c6dab197e19ac841f9ce65520453d0ba5fd0842d..2986b221165161ebdb0589dc06fcff8bbff51c9d 100644
--- a/Framework/Kernel/src/FilterChannel.cpp
+++ b/Framework/Kernel/src/FilterChannel.cpp
@@ -2,7 +2,7 @@
 #include "MantidKernel/FilterChannel.h"
 
 #include <Poco/LoggingRegistry.h>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 #include <Poco/Message.h>
 
 namespace Poco {
diff --git a/Framework/Kernel/src/Interpolation.cpp b/Framework/Kernel/src/Interpolation.cpp
index 5ccaa452831e6e20ca338a6d8414b05257f2b322..1307286d3bb37cb64e06c2329b3cc0c8173e62a3 100644
--- a/Framework/Kernel/src/Interpolation.cpp
+++ b/Framework/Kernel/src/Interpolation.cpp
@@ -1,7 +1,7 @@
 #include "MantidKernel/Interpolation.h"
 #include "MantidKernel/Logger.h"
 #include "MantidKernel/UnitFactory.h"
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 
 namespace Mantid {
 namespace Kernel {
@@ -170,7 +170,7 @@ std::ostream &operator<<(std::ostream &os, const Interpolation &f) {
 */
 std::istream &operator>>(std::istream &in, Interpolation &f) {
 
-  typedef Poco::StringTokenizer tokenizer;
+  typedef Mantid::Kernel::StringTokenizer tokenizer;
   std::string str;
   getline(in, str);
   tokenizer values(str, ";", tokenizer::TOK_TRIM);
diff --git a/Framework/Kernel/src/Strings.cpp b/Framework/Kernel/src/Strings.cpp
index 74e8eb569e9ad5cfd3548ec93cd55fa5bcb3a3be..3dc65243628718d79f69575b4ddf62e3813295eb 100644
--- a/Framework/Kernel/src/Strings.cpp
+++ b/Framework/Kernel/src/Strings.cpp
@@ -1,7 +1,7 @@
 #include "MantidKernel/Strings.h"
 #include "MantidKernel/UnitLabel.h"
+#include "MantidKernel/StringTokenizer.h"
 
-#include <Poco/StringTokenizer.h>
 #include <Poco/Path.h>
 
 #include <boost/algorithm/string.hpp>
@@ -436,11 +436,12 @@ std::map<std::string, std::string>
 splitToKeyValues(const std::string &input, const std::string &keyValSep,
                  const std::string &listSep) {
   std::map<std::string, std::string> keyValues;
-  const int splitOptions =
-      Poco::StringTokenizer::TOK_IGNORE_EMPTY + Poco::StringTokenizer::TOK_TRIM;
-  Poco::StringTokenizer listSplitter(input, listSep);
+  const int splitOptions = Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY +
+                           Mantid::Kernel::StringTokenizer::TOK_TRIM;
+  Mantid::Kernel::StringTokenizer listSplitter(input, listSep);
   for (auto iter = listSplitter.begin(); iter != listSplitter.end(); ++iter) {
-    Poco::StringTokenizer keyValSplitter(*iter, keyValSep, splitOptions);
+    Mantid::Kernel::StringTokenizer keyValSplitter(*iter, keyValSep,
+                                                   splitOptions);
     if (keyValSplitter.count() == 2) {
       keyValues[keyValSplitter[0]] = keyValSplitter[1];
     }
@@ -1045,24 +1046,14 @@ int isMember(const std::vector<std::string> &group,
  */
 std::vector<int> parseRange(const std::string &str, const std::string &elemSep,
                             const std::string &rangeSep) {
-  typedef Poco::StringTokenizer Tokenizer;
+  typedef Mantid::Kernel::StringTokenizer Tokenizer;
 
   boost::shared_ptr<Tokenizer> elements;
 
   if (elemSep.find(' ') != std::string::npos) {
-    // If element separator contains space character it's a special case,
-    // because in that case
-    // it is allowed to have element separator inside a range, e.g. "4 - 5", but
-    // not "4,-5"
-
-    // Space is added so that last empty element of the "1,2,3-" is not ignored
-    // and we can
-    // spot the error. Behaviour is changed in Poco 1.5 and this will not be
-    // needed.
-    Tokenizer ranges(str + " ", rangeSep, Tokenizer::TOK_TRIM);
+    Tokenizer ranges(str, rangeSep, Tokenizer::TOK_TRIM);
     std::string new_str =
         join(ranges.begin(), ranges.end(), rangeSep.substr(0, 1));
-
     elements = boost::make_shared<Tokenizer>(
         new_str, elemSep, Tokenizer::TOK_IGNORE_EMPTY | Tokenizer::TOK_TRIM);
   } else {
@@ -1076,8 +1067,7 @@ std::vector<int> parseRange(const std::string &str, const std::string &elemSep,
   result.reserve(elements->count());
 
   for (auto it = elements->begin(); it != elements->end(); it++) {
-    // See above for the reason space is added
-    Tokenizer rangeElements(*it + " ", rangeSep, Tokenizer::TOK_TRIM);
+    Tokenizer rangeElements(*it, rangeSep, Tokenizer::TOK_TRIM);
 
     size_t noOfRangeElements = rangeElements.count();
 
diff --git a/Framework/MDAlgorithms/src/BoxControllerSettingsAlgorithm.cpp b/Framework/MDAlgorithms/src/BoxControllerSettingsAlgorithm.cpp
index 8fe24a9e938278b945a1b993cc69bbd7e03a7e8f..df5e2bad7567a09cb0663f677898fa201f42cc35 100644
--- a/Framework/MDAlgorithms/src/BoxControllerSettingsAlgorithm.cpp
+++ b/Framework/MDAlgorithms/src/BoxControllerSettingsAlgorithm.cpp
@@ -36,7 +36,7 @@ void BoxControllerSettingsAlgorithm::initBoxControllerProps(
 
   // Split up comma-separated properties
   std::vector<int> value;
-  typedef Poco::StringTokenizer tokenizer;
+  typedef Mantid::Kernel::StringTokenizer tokenizer;
   tokenizer values(SplitInto, ",",
                    tokenizer::TOK_IGNORE_EMPTY | tokenizer::TOK_TRIM);
   value.clear();
diff --git a/Framework/WorkflowAlgorithms/src/HFIRInstrument.cpp b/Framework/WorkflowAlgorithms/src/HFIRInstrument.cpp
index 87910596d46ebc807626024cd6ab3440dd99f8c2..ff488bb337e2492b23b023beebd93e85600dd9eb 100644
--- a/Framework/WorkflowAlgorithms/src/HFIRInstrument.cpp
+++ b/Framework/WorkflowAlgorithms/src/HFIRInstrument.cpp
@@ -6,7 +6,7 @@
 #include "MantidKernel/Property.h"
 #include "MantidKernel/PropertyWithValue.h"
 #include "MantidDataObjects/Workspace2D.h"
-#include "Poco/StringTokenizer.h"
+#include "MantidKernel/StringTokenizer.h"
 #include "Poco/NumberParser.h"
 
 namespace Mantid {
@@ -97,8 +97,8 @@ double getSourceToSampleDistance(API::MatrixWorkspace_sptr dataWS) {
         "Unable to find [aperture-distances] instrument parameter");
 
   double SSD = 0;
-  Poco::StringTokenizer tok(pars[0], ",",
-                            Poco::StringTokenizer::TOK_IGNORE_EMPTY);
+  Mantid::Kernel::StringTokenizer tok(
+      pars[0], ",", Mantid::Kernel::StringTokenizer::TOK_IGNORE_EMPTY);
   if (tok.count() > 0 && tok.count() < 10 && nguides >= 0 && nguides < 9) {
     const std::string distance_as_string = tok[8 - nguides];
     if (!Poco::NumberParser::tryParseFloat(distance_as_string, SSD))
diff --git a/MantidQt/CustomInterfaces/src/Muon/MuonAnalysis.cpp b/MantidQt/CustomInterfaces/src/Muon/MuonAnalysis.cpp
index 83ea4decceae70f25a3396d2959133a1780a12a3..8a997bd9c34a60cdd094da010b80bce7826f32ff 100644
--- a/MantidQt/CustomInterfaces/src/Muon/MuonAnalysis.cpp
+++ b/MantidQt/CustomInterfaces/src/Muon/MuonAnalysis.cpp
@@ -33,7 +33,7 @@
 
 #include <Poco/File.h>
 #include <Poco/Path.h>
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 
 #include <boost/lexical_cast.hpp>
 #include <boost/assign.hpp>
diff --git a/MantidQt/CustomInterfaces/src/SANSRunWindow.cpp b/MantidQt/CustomInterfaces/src/SANSRunWindow.cpp
index e61ab45ee962e7b2c586f4e296a2b9be6415034b..7a1d0c3a16ec96ddf2b0213ac7bafedd090f06b8 100644
--- a/MantidQt/CustomInterfaces/src/SANSRunWindow.cpp
+++ b/MantidQt/CustomInterfaces/src/SANSRunWindow.cpp
@@ -39,7 +39,7 @@
 #include <QDesktopServices>
 #include <QUrl>
 
-#include <Poco/StringTokenizer.h>
+#include <MantidKernel/StringTokenizer.h>
 #include <Poco/Message.h>
 
 #include <boost/lexical_cast.hpp>
@@ -3311,9 +3311,11 @@ void SANSRunWindow::checkList() {
 
   bool valid(false);
   // split up the comma separated list ignoring spaces
-  Poco::StringTokenizer in(input, ",", Poco::StringTokenizer::TOK_TRIM);
+  Mantid::Kernel::StringTokenizer in(input, ",",
+                                     Mantid::Kernel::StringTokenizer::TOK_TRIM);
   try {
-    for (Poco::StringTokenizer::Iterator i = in.begin(), end = in.end();
+    for (Mantid::Kernel::StringTokenizer::Iterator i = in.begin(),
+                                                   end = in.end();
          i != end; ++i) { // try a lexical cast, we don't need its result only
                           // if there was an error
       boost::lexical_cast<double>(*i);