diff --git a/lwr/cache/__init__.py b/lwr/cache/__init__.py
index b5d79b1d8dce48786c75e6b14738d44e6db2b985..498e73896868e44eb2280f9ed31ed6cc46ca457f 100644
--- a/lwr/cache/__init__.py
+++ b/lwr/cache/__init__.py
@@ -57,7 +57,8 @@ class Cache(PersistenceStore):
         return self.destination(token)
 
     def __token(self, ip, path):
-        return sha256("IP:%s:%s" % (ip, path)).hexdigest()
+        for_hash = "IP:%s:%s" % (ip, path)
+        return sha256(for_hash.encode('UTF-8')).hexdigest()
 
 
 __all__ = [Cache]
diff --git a/lwr/lwr_client/action_mapper.py b/lwr/lwr_client/action_mapper.py
index c4e83cbabdc903dbfcee34094630c39d11f1da61..7602cef56926671db0d92eed72f5bb09664f24b1 100644
--- a/lwr/lwr_client/action_mapper.py
+++ b/lwr/lwr_client/action_mapper.py
@@ -30,23 +30,23 @@ class FileActionMapper(object):
     >>> mapper = FileActionMapper(MockClient())
     >>> unlink(f.name)
     >>> # Test first config line above, implicit path prefix mapper
-    >>> mapper.action('/opt/galaxy/tools/filters/catWrapper.py', 'input')
-    ('none',)
+    >>> mapper.action('/opt/galaxy/tools/filters/catWrapper.py', 'input')[0] == u'none'
+    True
     >>> # Test another (2nd) mapper, this one with a different action
-    >>> mapper.action('/galaxy/data/files/000/dataset_1.dat', 'input')
-    ('transfer',)
+    >>> mapper.action('/galaxy/data/files/000/dataset_1.dat', 'input')[0] == u'transfer'
+    True
     >>> # Always at least copy work_dir outputs.
-    >>> mapper.action('/opt/galaxy/database/working_directory/45.sh', 'work_dir')
-    ('copy',)
+    >>> mapper.action('/opt/galaxy/database/working_directory/45.sh', 'work_dir')[0] == u'copy'
+    True
     >>> # Test glob mapper (matching test)
-    >>> mapper.action('/cool/bamfiles/projectABC/study1/patient3.bam', 'input')
-    ('copy',)
+    >>> mapper.action('/cool/bamfiles/projectABC/study1/patient3.bam', 'input')[0] == u'copy'
+    True
     >>> # Test glob mapper (non-matching test)
-    >>> mapper.action('/cool/bamfiles/projectABC/study1/patient3.bam.bai', 'input')
-    ('none',)
+    >>> mapper.action('/cool/bamfiles/projectABC/study1/patient3.bam.bai', 'input')[0] == u'none'
+    True
     >>> # Regex mapper test.
-    >>> mapper.action('/old/galaxy/data/dataset_10245.dat', 'input')
-    ('copy',)
+    >>> mapper.action('/old/galaxy/data/dataset_10245.dat', 'input')[0] == u'copy'
+    True
     """
 
     def __init__(self, client):
diff --git a/lwr/lwr_client/destination.py b/lwr/lwr_client/destination.py
index a6327642e665fcd78bd6dd79d81d1f94350b25e1..e4db9b47682906907f4a2e093afc17615379444b 100644
--- a/lwr/lwr_client/destination.py
+++ b/lwr/lwr_client/destination.py
@@ -51,8 +51,8 @@ def submit_params(destination_params):
 
     >>> destination_params = {"private_token": "12345", "submit_native_specification": "-q batch"}
     >>> result = submit_params(destination_params)
-    >>> result.items()
-    [('native_specification', '-q batch')]
+    >>> result
+    {'native_specification': '-q batch'}
     """
     destination_params = destination_params or {}
     return dict([(key[len(SUBMIT_PREFIX):], destination_params[key])
diff --git a/lwr/lwr_client/manager.py b/lwr/lwr_client/manager.py
index 72f50a7e33771540ba2c6c51b367c97ab4fb4de4..2f134d1193c1bc3af9f68ee7332980d218ab8aa6 100644
--- a/lwr/lwr_client/manager.py
+++ b/lwr/lwr_client/manager.py
@@ -10,9 +10,9 @@ try:
 except ImportError:
     from urllib.parse import urlencode
 try:
-    from StringIO import StringIO
+    from StringIO import StringIO as BytesIO
 except ImportError:
-    from io import StringIO
+    from io import BytesIO
 
 from .client import Client, InputCachingClient
 from .transport import get_transport
@@ -61,7 +61,11 @@ class ClientManager(object):
         return self.client_class(destination_params, job_id, job_manager_interface, **self.extra_client_kwds)
 
     def __parse_destination_params(self, destination_params):
-        if isinstance(destination_params, str) or isinstance(destination_params, unicode):
+        try:
+            unicode_type = unicode
+        except NameError:
+            unicode_type = str
+        if isinstance(destination_params, str) or isinstance(destination_params, unicode_type):
             destination_params = url_to_destination_params(destination_params)
         return destination_params
 
@@ -135,9 +139,9 @@ class LocalJobManagerInterface(object):
 
     def __build_body(self, data, input_path):
         if data is not None:
-            return StringIO(data)
+            return BytesIO(data)
         elif input_path is not None:
-            return open(input_path, 'r')
+            return open(input_path, 'rb')
         else:
             return None
 
diff --git a/lwr/lwr_client/transport/curl.py b/lwr/lwr_client/transport/curl.py
index 13010d654555d640ef55432552b7efa2109153fd..72ee1d92d5909c9eea3ff4d6846f718c41dfc275 100644
--- a/lwr/lwr_client/transport/curl.py
+++ b/lwr/lwr_client/transport/curl.py
@@ -28,6 +28,8 @@ class PycurlTransport(object):
                 c.setopt(c.INFILESIZE, filesize)
             if data:
                 c.setopt(c.POST, 1)
+                if not isinstance(data, bytes):
+                    data = data.encode('UTF-8')
                 c.setopt(c.POSTFIELDS, data)
             c.perform()
             if not output_path:
diff --git a/lwr/lwr_client/transport/standard.py b/lwr/lwr_client/transport/standard.py
index 6312da4bef0958abf1b01a89aab705bedbe69ddb..9ccb652ef867a375ed6da112e8b07f7856653bbe 100644
--- a/lwr/lwr_client/transport/standard.py
+++ b/lwr/lwr_client/transport/standard.py
@@ -33,7 +33,7 @@ class Urllib2Transport(object):
             with open(output_path, 'wb') as output:
                 while True:
                     buffer = response.read(1024)
-                    if buffer == "":
+                    if not buffer:
                         break
                     output.write(buffer)
             return response
diff --git a/lwr/managers/queued.py b/lwr/managers/queued.py
index 7cba4d4f653a4d6e6b57e58c7d9dabe330923698..c09ccffde3ea526aa201272858db190fd3e7c2a4 100644
--- a/lwr/managers/queued.py
+++ b/lwr/managers/queued.py
@@ -91,13 +91,13 @@ class PersistedJobStore(JobMetadataStore):
     >>> os.remove(tf.name)
     >>> store = PersistedJobStore(tf.name)
     >>> store.enqueue("1234", "/bin/ls")
-    >>> jobs = store.persisted_jobs()
+    >>> jobs = list(store.persisted_jobs())
     >>> jobs[0][0]
     '1234'
     >>> jobs[0][1]
     '/bin/ls'
     >>> store = PersistedJobStore(tf.name)
-    >>> jobs = store.persisted_jobs()
+    >>> jobs = list(store.persisted_jobs())
     >>> jobs[0][0]
     '1234'
     >>> jobs[0][1]
diff --git a/lwr/managers/util/cli/shell/local.py b/lwr/managers/util/cli/shell/local.py
index 37346b73313218048f990695fea7c91621c6e217..375a58e2955d1cb4faa35b27d7eccf1cd2369898 100644
--- a/lwr/managers/util/cli/shell/local.py
+++ b/lwr/managers/util/cli/shell/local.py
@@ -5,7 +5,7 @@ from subprocess import Popen, PIPE
 from ..shell import BaseShellExec
 from ....util import Bunch, kill_pid
 
-TIMEOUT_ERROR_MESSAGE = 'Execution timed out'
+TIMEOUT_ERROR_MESSAGE = u'Execution timed out'
 TIMEOUT_RETURN_CODE = -1
 DEFAULT_TIMEOUT = 60
 DEFAULT_TIMEOUT_CHECK_INTERVAL = 3
@@ -16,14 +16,16 @@ class LocalShell(BaseShellExec):
 
     >>> shell = LocalShell()
     >>> def exec_python(script, **kwds): return shell.execute('python -c "%s"' % script, **kwds)
-    >>> exec_result = exec_python("print 'Hello World'")
-    >>> exec_result.stdout.strip()
-    'Hello World'
+    >>> exec_result = exec_python("from __future__ import print_function; print('Hello World')")
+    >>> exec_result.stderr == u''
+    True
+    >>> exec_result.stdout.strip() == u'Hello World'
+    True
     >>> exec_result = exec_python("import time; time.sleep(90)", timeout=3, timeout_check_interval=1)
-    >>> exec_result.stdout
-    ''
-    >>> exec_result.stderr
-    'Execution timed out'
+    >>> exec_result.stdout == u''
+    True
+    >>> exec_result.stderr == u'Execution timed out'
+    True
     >>> exec_result.returncode == TIMEOUT_RETURN_CODE
     True
     """
@@ -43,8 +45,14 @@ class LocalShell(BaseShellExec):
             sleep(timeout_check_interval)
         else:
             kill_pid(p.pid)
-            return Bunch(stdout='', stderr=TIMEOUT_ERROR_MESSAGE, returncode=TIMEOUT_RETURN_CODE)
+            return Bunch(stdout=u'', stderr=TIMEOUT_ERROR_MESSAGE, returncode=TIMEOUT_RETURN_CODE)
         outf.seek(0)
-        return Bunch(stdout=outf.read(), stderr=p.stderr.read(), returncode=p.returncode)
+        return Bunch(stdout=_read_str(outf), stderr=_read_str(p.stderr), returncode=p.returncode)
+
+
+def _read_str(stream):
+    contents = stream.read()
+    return contents.decode('UTF-8') if isinstance(contents, bytes) else contents
+
 
 __all__ = ('LocalShell',)
diff --git a/lwr/managers/util/job_script/__init__.py b/lwr/managers/util/job_script/__init__.py
index 97c04580f2bd3e1d50911a153bd23e0ccc70225b..a60ec19ae08e4abf2be4da41472b4e9c9840315b 100644
--- a/lwr/managers/util/job_script/__init__.py
+++ b/lwr/managers/util/job_script/__init__.py
@@ -2,11 +2,11 @@ from string import Template
 from pkg_resources import resource_string
 
 DEFAULT_JOB_FILE_TEMPLATE = Template(
-    resource_string(__name__, 'DEFAULT_JOB_FILE_TEMPLATE.sh')
+    resource_string(__name__, 'DEFAULT_JOB_FILE_TEMPLATE.sh').decode('UTF-8')
 )
 
 SLOTS_STATEMENT_CLUSTER_DEFAULT = \
-    resource_string(__name__, 'CLUSTER_SLOTS_STATEMENT.sh')
+    resource_string(__name__, 'CLUSTER_SLOTS_STATEMENT.sh').decode('UTF-8')
 
 SLOTS_STATEMENT_SINGLE = """
 GALAXY_SLOTS="1"
diff --git a/lwr/routes.py b/lwr/routes.py
index f3a563d1d39311c686768cd30a3a66b5661cdeeb..5dd1c4a5403c8a8f664f69c84034677bf9e55dab 100644
--- a/lwr/routes.py
+++ b/lwr/routes.py
@@ -175,7 +175,7 @@ def _handle_upload_to_directory(file_cache, directory, remote_path, body, cache_
     source = body
     if cache_token:
         cached_file = file_cache.destination(cache_token)
-        source = open(cached_file, 'r')
+        source = open(cached_file, 'rb')
         log.info("Copying cached file %s to %s" % (cached_file, path))
     copy_to_path(source, path)
     return {"path": path}
diff --git a/lwr/tools/toolbox.py b/lwr/tools/toolbox.py
index 33786fda766e35da02da6fd13c358b256f68dda0..1cda2690c3fb0c5121aa245a0080c7b67f7f113d 100644
--- a/lwr/tools/toolbox.py
+++ b/lwr/tools/toolbox.py
@@ -1,8 +1,9 @@
-
 from lwr.tools.validator import ExpressionValidator
 from xml.etree import ElementTree
 from os.path import join, abspath, dirname
 
+from io import open
+
 from logging import getLogger
 log = getLogger(__name__)
 
@@ -64,7 +65,7 @@ class InputsValidator(object):
         config_validator = self.config_validators.get(name, None)
         valid = True
         if config_validator:
-            contents = open(path, "r").read()
+            contents = open(path, "r", encoding="UTF-8").read()
             valid = config_validator.validate(job_directory, contents)
         return valid
 
diff --git a/lwr/util.py b/lwr/util.py
index 92473f853aef79565a9696965810086f50bbffbd..0ed33164c2ec740d9cc988c022cb9075643c8f1e 100644
--- a/lwr/util.py
+++ b/lwr/util.py
@@ -1,6 +1,7 @@
 import os
 import platform
 import posixpath
+import six
 from shutil import move, rmtree
 from subprocess import Popen
 from collections import deque
@@ -32,7 +33,7 @@ def _copy_and_close(object, output):
     try:
         while True:
             buffer = object.read(BUFFER_SIZE)
-            if buffer == "":
+            if not buffer:
                 break
             output.write(buffer)
     finally:
@@ -59,7 +60,7 @@ def atomicish_move(source, destination, tmp_suffix="_TMP"):
     > temp_dir = mkdtemp()
     > source = join(temp_dir, "the_source")
     > destination = join(temp_dir, "the_dest")
-    > open(source, "w").write("Hello World!")
+    > open(source, "wb").write(b"Hello World!")
     > assert exists(source)
     > assert not exists(destination)
     > atomicish_move(source, destination)
@@ -109,7 +110,7 @@ class JobDirectory(object):
         path = self._job_file(name)
         job_file = None
         try:
-            job_file = open(path, 'r')
+            job_file = open(path, 'rb')
             return job_file.read()
         except:
             if default is not None:
@@ -122,8 +123,10 @@ class JobDirectory(object):
 
     def write_file(self, name, contents):
         path = self._job_file(name)
-        job_file = open(path, 'w')
+        job_file = open(path, 'wb')
         try:
+            if isinstance(contents, six.text_type):
+                contents = contents.encode("UTF-8")
             job_file.write(contents)
         finally:
             job_file.close()
@@ -141,7 +144,7 @@ class JobDirectory(object):
     def contains_file(self, name):
         return os.path.exists(self._job_file(name))
 
-    def open_file(self, name, mode='w'):
+    def open_file(self, name, mode='wb'):
         return open(self._job_file(name), mode)
 
     def exists(self):
diff --git a/test/cache_test.py b/test/cache_test.py
index 3d03ffbfd955ebc9c1e4dce341f255a63c6555b6..d46100a0525dd0571a5bfcfcf9dd4abcdc5bce28 100644
--- a/test/cache_test.py
+++ b/test/cache_test.py
@@ -12,7 +12,7 @@ class CacheTest(TestCase):
     def setUp(self):
         self.temp_dir = mkdtemp()
         self.temp_file = NamedTemporaryFile(delete=False)
-        self.temp_file.write("Hello World!")
+        self.temp_file.write(b"Hello World!")
         self.temp_file.close()
         self.cache = Cache(self.temp_dir)
 
diff --git a/test/check.py b/test/check.py
index 1b14e85a5b79428f0a66e9e24d712107463d776e..3ec1e2f2a70939226d9e5f82b0c529203d6eb364 100644
--- a/test/check.py
+++ b/test/check.py
@@ -6,7 +6,7 @@ import traceback
 
 from lwr.lwr_client import submit_job, finish_job, ClientManager
 
-TEST_SCRIPT = """
+TEST_SCRIPT = b"""
 import sys
 output = open(sys.argv[3], 'w')
 input_input = open(sys.argv[2], 'r')
@@ -21,7 +21,7 @@ finally:
     config_input.close()
 """
 
-EXPECTED_OUTPUT = "hello world output"
+EXPECTED_OUTPUT = b"hello world output"
 
 
 class MockTool(object):
@@ -45,7 +45,7 @@ def run(options):
         temp_tool_path = os.path.join(temp_directory, "t", "script.py")
         temp_output_path = os.path.join(temp_directory, "output")
 
-        __write_to_file(temp_input_path, "Hello world input!!@!")
+        __write_to_file(temp_input_path, b"Hello world input!!@!")
         __write_to_file(temp_config_path, EXPECTED_OUTPUT)
         __write_to_file(temp_tool_path, TEST_SCRIPT)
 
@@ -126,7 +126,7 @@ def __client_manager(options):
 
 
 def __write_to_file(path, contents):
-    with open(path, "w") as file:
+    with open(path, "wb") as file:
         file.write(contents)
 
 
diff --git a/test/client_test.py b/test/client_test.py
index 3c2cd50be00399bdf6994e53a81539a90bbbd50c..a0efcf45c8da02c6ac598f52faabbeadf4e4cc44 100644
--- a/test/client_test.py
+++ b/test/client_test.py
@@ -2,6 +2,8 @@ from collections import deque
 import tempfile
 import os
 
+from six import text_type, binary_type
+
 from lwr.lwr_client.client import Client
 from lwr.lwr_client.manager import HttpJobManagerInterface
 from lwr.lwr_client.transport import Urllib2Transport
@@ -36,7 +38,7 @@ class FakeResponse(object):
         if self.first_read:
             result = self.body
         else:
-            result = ""
+            result = b""
         self.first_read = False
         return result
 
@@ -86,10 +88,11 @@ class RequestChecker(object):
     def check_data(self, data):
         if data is None:
             assert self.data is None
-        elif type(data) == str:
+        elif type(data) in (binary_type, text_type):
             assert self.data == data
         else:
-            assert data.read(1024) == self.data
+            data_read = data.read(1024)
+            assert data_read == self.data, "data_read %s is not expected data %s" % (data_read, self.data)
 
     def __call__(self, request, data=None):
         self.called = True
@@ -104,7 +107,7 @@ def test_setup():
     """ Test the setup method of Client """
     client = TestClient()
     request_checker = RequestChecker("setup")
-    response_json = '{"working_directory":"C:\\\\home\\\\dir","outputs_directory" : "C:\\\\outputs","path_separator" : "\\\\"}'
+    response_json = b'{"working_directory":"C:\\\\home\\\\dir","outputs_directory" : "C:\\\\outputs","path_separator" : "\\\\"}'
     client.expect_open(request_checker, response_json)
     setup_response = client.setup()
     request_checker.assert_called()
@@ -117,7 +120,7 @@ def test_launch():
     """ Test the launch method of client. """
     client = TestClient()
     request_checker = RequestChecker("launch", {"command_line": "python"})
-    client.expect_open(request_checker, 'OK')
+    client.expect_open(request_checker, b'OK')
     client.launch("python")
     request_checker.assert_called()
 
@@ -130,8 +133,8 @@ def __test_upload(upload_type):
         temp_file.write("Hello World!")
     finally:
         temp_file.close()
-    request_checker = RequestChecker("upload_%s" % upload_type, {"name": os.path.basename(temp_file_path)}, "Hello World!")
-    client.expect_open(request_checker, '{"path" : "C:\\\\tools\\\\foo"}')
+    request_checker = RequestChecker("upload_%s" % upload_type, {"name": os.path.basename(temp_file_path)}, b"Hello World!")
+    client.expect_open(request_checker, b'{"path" : "C:\\\\tools\\\\foo"}')
 
     if(upload_type == 'tool_file'):
         upload_result = client.put_file(temp_file_path, 'tool')
@@ -160,7 +163,7 @@ def test_upload_config():
         temp_file.close()
     modified_contents = "Hello World! <Modified>"
     request_checker = RequestChecker("upload_config_file", {"name": os.path.basename(temp_file_path)}, modified_contents)
-    client.expect_open(request_checker, '{"path" : "C:\\\\tools\\\\foo"}')
+    client.expect_open(request_checker, b'{"path" : "C:\\\\tools\\\\foo"}')
     upload_result = client.put_file(temp_file_path, 'config', contents=modified_contents)
     request_checker.assert_called()
     assert upload_result["path"] == "C:\\tools\\foo"
@@ -172,22 +175,20 @@ def test_download_output():
     temp_file = tempfile.NamedTemporaryFile()
     temp_file.close()
     request_checker = RequestChecker("get_output_type", {"name": os.path.basename(temp_file.name)})
-    client.expect_open(request_checker, '"direct"')
+    client.expect_open(request_checker, b'"direct"')
     request_checker = RequestChecker("download_output", {"name": os.path.basename(temp_file.name), "output_type": "direct"})
-    client.expect_open(request_checker, "test output contents")
+    client.expect_open(request_checker, b"test output contents")
     client.fetch_output(temp_file.name, ".")
 
-    contents = open(temp_file.name, "r")
-    try:
-        assert contents.read(1024) == "test output contents"
-    finally:
-        contents.close()
+    with open(temp_file.name, "r") as f:
+        contents = f.read(1024)
+        assert contents == "test output contents", "Unexpected contents %s" % contents
 
 
 def test_wait():
     client = TestClient()
     request_checker = RequestChecker("check_complete")
-    client.expect_open(request_checker, '{"complete": "true", "stdout" : "output"}')
+    client.expect_open(request_checker, b'{"complete": "true", "stdout" : "output"}')
     wait_response = client.wait()
     request_checker.assert_called()
     assert wait_response['stdout'] == "output"
@@ -196,7 +197,7 @@ def test_wait():
 def test_get_status_complete_legacy():
     client = TestClient()
     request_checker = RequestChecker("check_complete")
-    client.expect_open(request_checker, '{"complete": "true", "stdout" : "output"}')
+    client.expect_open(request_checker, b'{"complete": "true", "stdout" : "output"}')
     assert client.get_status() == "complete"
     request_checker.assert_called()
 
@@ -204,7 +205,7 @@ def test_get_status_complete_legacy():
 def test_get_status_running_legacy():
     client = TestClient()
     request_checker = RequestChecker("check_complete")
-    client.expect_open(request_checker, '{"complete": "false"}')
+    client.expect_open(request_checker, b'{"complete": "false"}')
     assert client.get_status() == "running"
     request_checker.assert_called()
 
@@ -212,7 +213,7 @@ def test_get_status_running_legacy():
 def test_get_status_queued():
     client = TestClient()
     request_checker = RequestChecker("check_complete")
-    client.expect_open(request_checker, '{"complete": "false", "status" : "queued"}')
+    client.expect_open(request_checker, b'{"complete": "false", "status" : "queued"}')
     assert client.get_status() == "queued"
     request_checker.assert_called()
 
@@ -221,7 +222,7 @@ def test_get_status_invalid():
     client = TestClient()
     request_checker = RequestChecker("check_complete")
     # Mimic bug in specific older LWR instances.
-    client.expect_open(request_checker, '{"complete": "false", "status" : "status"}')
+    client.expect_open(request_checker, b'{"complete": "false", "status" : "status"}')
     assert client.get_status() == "running"
     request_checker.assert_called()
 
@@ -229,7 +230,7 @@ def test_get_status_invalid():
 def test_kill():
     client = TestClient()
     request_checker = RequestChecker("kill")
-    client.expect_open(request_checker, 'OK')
+    client.expect_open(request_checker, b'OK')
     client.kill()
     request_checker.assert_called()
 
@@ -237,6 +238,6 @@ def test_kill():
 def test_clean():
     client = TestClient()
     request_checker = RequestChecker("clean")
-    client.expect_open(request_checker, 'OK')
+    client.expect_open(request_checker, b'OK')
     client.clean()
     request_checker.assert_called()
diff --git a/test/integration_test.py b/test/integration_test.py
index 574ddb0ca687577e7d3888a75e0c260f7c80dc3d..cd64d54dfdbd9547cecf91bf6add69622fba97fb 100644
--- a/test/integration_test.py
+++ b/test/integration_test.py
@@ -1,4 +1,5 @@
 from os.path import join
+from six import next, itervalues
 from .test_utils import TempDirectoryTestCase, skipUnlessExecutable, skipUnlessModule
 
 from lwr.util import Bunch
@@ -15,7 +16,7 @@ class BaseIntegrationTest(TempDirectoryTestCase):
         app_conf = app_conf.copy()
         job_conf_props = job_conf_props.copy()
         if "suppress_output" not in kwds:
-            kwds["suppress_output"] = True
+            kwds["suppress_output"] = False
         if job_conf_props:
             job_conf = join(self.temp_directory, "job_managers.ini")
             config = ConfigParser()
@@ -31,7 +32,7 @@ class BaseIntegrationTest(TempDirectoryTestCase):
         if kwds.get("direct_interface", None):
             from .test_utils import test_app
             with test_app({}, app_conf, {}) as app:
-                options = Bunch(job_manager=app.app.managers.values()[0], file_cache=app.app.file_cache, **kwds)
+                options = Bunch(job_manager=next(itervalues(app.app.managers)), file_cache=app.app.file_cache, **kwds)
                 run(options)
         else:
             from .test_utils import test_server
diff --git a/test/test_utils.py b/test/test_utils.py
index 581377135b0ea4acd7e2dda29c871ba051e5e497..99a4f7f1739989b1088b03440023aca17ff9ced4 100644
--- a/test/test_utils.py
+++ b/test/test_utils.py
@@ -100,8 +100,12 @@ class TestAuthorization(object):
 @contextmanager
 def test_server(global_conf={}, app_conf={}, test_conf={}):
     with test_app(global_conf, app_conf, test_conf) as app:
-        from paste.exceptions.errormiddleware import ErrorMiddleware
-        error_app = ErrorMiddleware(app.app, debug=True, error_log="errors")
+        try:
+            from paste.exceptions.errormiddleware import ErrorMiddleware
+            error_app = ErrorMiddleware(app.app, debug=True, error_log="errors")
+        except ImportError:
+            # paste.exceptions not available for Python 3.
+            error_app = app.app
         server = StopableWSGIServer.create(error_app)
         try:
             server.wait()