From: Michael R. Crusoe <michael.crusoe@gmail.com>
Subject: Cherry-pick Python 3 testing fixes from upstream
--- toil.orig/src/toil/lib/docker.py
+++ toil/src/toil/lib/docker.py
@@ -20,7 +20,6 @@
 from docker.errors import APIError
 from docker.errors import NotFound
 from docker.errors import DockerException
-from docker.utils.types import LogConfig
 from docker.api.container import ContainerApiMixin
 
 from toil.lib.retry import retry
--- toil.orig/src/toil/test/jobStores/jobStoreTest.py
+++ toil/src/toil/test/jobStores/jobStoreTest.py
@@ -31,11 +31,11 @@
 import logging
 import threading
 import os
+import sys
 import shutil
 import tempfile
 import time
 import uuid
-from stubserver import FTPStubServer
 from abc import abstractmethod, ABCMeta
 from itertools import chain, islice, count
 from threading import Thread
@@ -59,7 +59,6 @@
 from toil.job import Job, JobNode
 from toil.jobStores.abstractJobStore import (NoSuchJobException,
                                              NoSuchFileException)
-from toil.jobStores.googleJobStore import googleRetry
 from toil.jobStores.fileJobStore import FileJobStore
 from toil.test import (ToilTest,
                        needs_aws,
@@ -102,7 +101,7 @@
         @classmethod
         @memoize
         def __new__(cls, *args):
-            return super(AbstractJobStoreTest.Test, cls).__new__(*args)
+            return super(AbstractJobStoreTest.Test, cls).__new__(cls)
 
         def _createConfig(self):
             return Config()
@@ -403,18 +402,22 @@
             """Tests the sharing of files."""
             jobstore1 = self.jobstore_initialized
             jobstore2 = self.jobstore_resumed_noconfig
+
+            bar = 'bar'
+            if sys.version_info >= (3, 0):
+                bar = b'bar'
 
             with jobstore1.writeSharedFileStream('foo') as f:
-                f.write('bar')
+                f.write(bar)
             # ... read that file on worker, ...
             with jobstore2.readSharedFileStream('foo') as f:
-                self.assertEquals('bar', f.read())
+                self.assertEquals(bar, f.read())
             # ... and read it again on jobstore1.
             with jobstore1.readSharedFileStream('foo') as f:
-                self.assertEquals('bar', f.read())
+                self.assertEquals(bar, f.read())
 
             with jobstore1.writeSharedFileStream('nonEncrypted', isProtected=False) as f:
-                f.write('bar')
+                f.write(bar)
             self.assertUrl(jobstore1.getSharedPublicUrl('nonEncrypted'))
             self.assertRaises(NoSuchFileException, jobstore1.getSharedPublicUrl, 'missing')
 
@@ -435,12 +438,19 @@
             # Check file exists
             self.assertTrue(jobstore2.fileExists(fileOne))
             self.assertTrue(jobstore1.fileExists(fileOne))
+            one = 'one'
+            two = 'two'
+            three = 'three'
+            if sys.version_info >= (3, 0):
+                one = b'one'
+                two = b'two'
+                three = b'three'
             # ... write to the file on jobstore2, ...
             with jobstore2.updateFileStream(fileOne) as f:
-                f.write('one')
+                f.write(one)
             # ... read the file as a stream on the jobstore1, ....
             with jobstore1.readFileStream(fileOne) as f:
-                self.assertEquals(f.read(), 'one')
+                self.assertEquals(f.read(), one)
 
             # ... and copy it to a temporary physical file on the jobstore1.
             fh, path = tempfile.mkstemp()
@@ -452,27 +462,27 @@
                     shutil.copyfile(tmpPath, path)
                 finally:
                     os.unlink(tmpPath)
-                with open(path, 'r+') as f:
-                    self.assertEquals(f.read(), 'one')
+                with open(path, 'rb+') as f:
+                    self.assertEquals(f.read(), one)
                     # Write a different string to the local file ...
                     f.seek(0)
                     f.truncate(0)
-                    f.write('two')
+                    f.write(two)
                 # ... and create a second file from the local file.
                 fileTwo = jobstore1.writeFile(path, jobOnJobStore1.jobStoreID)
                 with jobstore2.readFileStream(fileTwo) as f:
-                    self.assertEquals(f.read(), 'two')
+                    self.assertEquals(f.read(), two)
                 # Now update the first file from the local file ...
                 jobstore1.updateFile(fileOne, path)
                 with jobstore2.readFileStream(fileOne) as f:
-                    self.assertEquals(f.read(), 'two')
+                    self.assertEquals(f.read(), two)
             finally:
                 os.unlink(path)
             # Create a third file to test the last remaining method.
             with jobstore2.writeFileStream(jobOnJobStore1.jobStoreID) as (f, fileThree):
-                f.write('three')
+                f.write(three)
             with jobstore1.readFileStream(fileThree) as f:
-                self.assertEquals(f.read(), 'three')
+                self.assertEquals(f.read(), three)
             # Delete a file explicitly but leave files for the implicit deletion through the parent
             jobstore2.deleteFile(fileOne)
 
@@ -510,22 +520,28 @@
 
             # Collects stats and logging messages.
             stats = set()
-
+            one = 'one'
+            two = 'two'
+            three = 'three'
+            if sys.version_info >= (3, 0):
+                one = b'one'
+                two = b'two'
+                three = b'three'
             # No stats or logging added yet. Expect nothing.
             self.assertEquals(0, jobstore1.readStatsAndLogging(callback))
             self.assertEquals(set(), stats)
 
             # Test writing and reading.
-            jobstore2.writeStatsAndLogging('1')
+            jobstore2.writeStatsAndLogging(one)
             self.assertEquals(1, jobstore1.readStatsAndLogging(callback))
-            self.assertEquals({'1'}, stats)
+            self.assertEquals({one}, stats)
             self.assertEquals(0, jobstore1.readStatsAndLogging(callback))   # readStatsAndLogging purges saved stats etc
 
-            jobstore2.writeStatsAndLogging('1')
-            jobstore2.writeStatsAndLogging('2')
+            jobstore2.writeStatsAndLogging(one)
+            jobstore2.writeStatsAndLogging(two)
             stats = set()
             self.assertEquals(2, jobstore1.readStatsAndLogging(callback))
-            self.assertEquals({'1', '2'}, stats)
+            self.assertEquals({one, two}, stats)
 
             largeLogEntry = os.urandom(self._largeLogEntrySize())
             stats = set()
@@ -634,7 +650,7 @@
         @classmethod
         def makeImportExportTests(cls):
 
-            testClasses = [FileJobStoreTest, AWSJobStoreTest, AzureJobStoreTest, GoogleJobStoreTest]
+            testClasses = [FileJobStoreTest, AWSJobStoreTest, AzureJobStoreTest]
 
             activeTestClassesByName = {testCls.__name__: testCls
                                        for testCls in testClasses
@@ -714,7 +730,13 @@
                     assignedPort = http.server_address[1]
                     url = 'http://localhost:%d' % assignedPort
                     with self.jobstore_initialized.readFileStream(self.jobstore_initialized.importFile(url)) as readable:
-                        self.assertEqual(readable.read(), StubHttpRequestHandler.fileContents)
+                        f1 = readable.read()
+                        f2 = StubHttpRequestHandler.fileContents
+                        if isinstance(f1, bytes) and not isinstance(f2, bytes):
+                            f1 = f1.decode()
+                        if isinstance(f2, bytes) and not isinstance(f1, bytes):
+                            f1 = f1.encode()
+                        self.assertEqual(f1, f2)
                 finally:
                     http.shutdown()
                     httpThread.join()
@@ -723,6 +745,7 @@
 
         def testImportFtpFile(self):
             '''Test importing a file over FTP'''
+            from stubserver import FTPStubServer
             file = {'name':'foo', 'content':'foo bar baz qux'}
             ftp = FTPStubServer(0)
             ftp.run()
@@ -785,7 +808,7 @@
                 checksumThread = Thread(target=checksumThreadFn)
                 checksumThread.start()
                 try:
-                    with open(random_device) as readable:
+                    with open(random_device, 'rb') as readable:
                         with self.jobstore_initialized.writeFileStream(job.jobStoreID) as (writable, fileId):
                             for i in range(int(partSize * partsPerFile / bufSize)):
                                 buf = readable.read(bufSize)
@@ -813,8 +836,8 @@
                 checksum = hashlib.md5()
                 fh, path = tempfile.mkstemp()
                 try:
-                    with os.fdopen(fh, 'r+') as writable:
-                        with open(random_device) as readable:
+                    with os.fdopen(fh, 'wb+') as writable:
+                        with open(random_device, 'rb') as readable:
                             for i in range(int(partSize * partsPerFile / bufSize)):
                                 buf = readable.read(bufSize)
                                 writable.write(buf)
@@ -842,11 +865,11 @@
             job = self.jobstore_initialized.create(self.arbitraryJob)
             nullFile = self.jobstore_initialized.writeFile('/dev/null', job.jobStoreID)
             with self.jobstore_initialized.readFileStream(nullFile) as f:
-                self.assertEquals(f.read(), "")
+                assert not f.read()
             with self.jobstore_initialized.writeFileStream(job.jobStoreID) as (f, nullStream):
                 pass
             with self.jobstore_initialized.readFileStream(nullStream) as f:
-                self.assertEquals(f.read(), "")
+                assert not f.read()
             self.jobstore_initialized.delete(job.jobStoreID)
 
         @slow
@@ -856,7 +879,7 @@
             dirPath = self._createTempDir()
             filePath = os.path.join(dirPath, 'large')
             hashIn = hashlib.md5()
-            with open(filePath, 'w') as f:
+            with open(filePath, 'wb') as f:
                 for i in range(0, 10):
                     buf = os.urandom(self._partSize())
                     f.write(buf)
@@ -874,7 +897,7 @@
 
             # Reread the file to confirm success.
             hashOut = hashlib.md5()
-            with open(filePath, 'r') as f:
+            with open(filePath, 'rb') as f:
                 while True:
                     buf = f.read(self._partSize())
                     if not buf:
@@ -962,11 +985,15 @@
                 # will get blocked on the write. Technically anything
                 # greater than the pipe buffer size plus the libc
                 # buffer size (64K + 4K(?))  should trigger this bug,
-                # but this gives us a lot of extra room just to be
-                # sure.
-                f.write('a' * 300000)
+                # but this gives us a lot of extra room just to be sure.
+
+                # python 3 requires self.fileContents to be a bytestring
+                a = 'a'
+                if sys.version_info >= (3, 0):
+                    a = b'a'
+                f.write(a * 300000)
             with self.jobstore_initialized.readFileStream(fileID) as f:
-                self.assertEquals(f.read(1), "a")
+                self.assertEquals(f.read(1), a)
             # If it times out here, there's a deadlock
 
         @abstractmethod
@@ -1091,14 +1118,14 @@
             return url
         else:
             content = os.urandom(size)
-            with open(localFilePath, 'w') as writable:
+            with open(localFilePath, 'wb') as writable:
                 writable.write(content)
 
             return url, hashlib.md5(content).hexdigest()
 
     def _hashTestFile(self, url):
         localFilePath = FileJobStore._extractPathFromUrl(urlparse.urlparse(url))
-        with open(localFilePath, 'r') as f:
+        with open(localFilePath, 'rb') as f:
             return hashlib.md5(f.read()).hexdigest()
 
     def _createExternalStore(self):
@@ -1119,54 +1146,6 @@
             os.unlink(path)
 
 
-@needs_google
-class GoogleJobStoreTest(AbstractJobStoreTest.Test):
-    projectID = os.getenv('TOIL_GOOGLE_PROJECTID')
-    headers = {"x-goog-project-id": projectID}
-
-    def _createJobStore(self):
-        from toil.jobStores.googleJobStore import GoogleJobStore
-        return GoogleJobStore(GoogleJobStoreTest.projectID + ":" + self.namePrefix)
-
-    def _corruptJobStore(self):
-        # The Google job store has only one resource, the bucket, so we can't corrupt it without
-        # fully deleting it.
-        pass
-
-    def _prepareTestFile(self, bucket, size=None):
-        from toil.jobStores.googleJobStore import GoogleJobStore
-        fileName = 'testfile_%s' % uuid.uuid4()
-        url = 'gs://%s/%s' % (bucket.name, fileName)
-        if size is None:
-            return url
-        with open('/dev/urandom', 'r') as readable:
-            contents = readable.read(size)
-        GoogleJobStore._writeToUrl(StringIO(contents), urlparse.urlparse(url))
-        return url, hashlib.md5(contents).hexdigest()
-
-    def _hashTestFile(self, url):
-        from toil.jobStores.googleJobStore import GoogleJobStore
-        contents = GoogleJobStore._getBlobFromURL(urlparse.urlparse(url)).download_as_string()
-        return hashlib.md5(contents).hexdigest()
-
-    @googleRetry
-    def _createExternalStore(self):
-        from google.cloud import storage
-        bucketName = ("import-export-test-" + str(uuid.uuid4()))
-        storageClient = storage.Client()
-        return storageClient.create_bucket(bucketName)
-
-    @googleRetry
-    def _cleanUpExternalStore(self, bucket):
-        # this is copied from googleJobStore.destroy
-        try:
-            bucket.delete(force=True)
-            # throws ValueError if bucket has more than 256 objects. Then we must delete manually
-        except ValueError:
-            bucket.delete_blobs(bucket.list_blobs)
-            bucket.delete()
-
-
 @needs_aws
 class AWSJobStoreTest(AbstractJobStoreTest.Test):
 
@@ -1448,6 +1427,9 @@
         self.send_header("Content-type", "text/plain")
         self.send_header("Content-length", len(self.fileContents))
         self.end_headers()
+        # python 3 requires self.fileContents to be a bytestring
+        if sys.version_info >= (3, 0):
+            self.fileContents = self.fileContents.encode('utf-8')
         self.wfile.write(self.fileContents)
 
 
--- toil.orig/src/toil/test/src/fileStoreTest.py
+++ toil/src/toil/test/src/fileStoreTest.py
@@ -1338,10 +1338,13 @@
     """
     for name, kind, clazz, value in inspect.classify_class_attrs(cls):
         if kind == 'static method':
-            method = value.__func__
-            args = inspect.getargspec(method).args
-            if args and args[0] == 'job':
-                globals()[name] = method
+            method = value
+            try:
+                args = inspect.getargspec(method).args
+                if args and args[0] == 'job':
+                    globals()[name] = method
+            except TypeError:
+                pass
 
 
 _exportStaticMethodAsGlobalFunctions(hidden.AbstractFileStoreTest)
--- toil.orig/src/toil/test/src/jobFileStoreTest.py
+++ toil/src/toil/test/src/jobFileStoreTest.py
@@ -134,13 +134,24 @@
             with open(tempFile, 'w') as fH:
                 fH.write(testString)
             #Write a local copy of the file using the local file
-            outputFileStoreIds.append(job.fileStore.writeGlobalFile(tempFile))
+            fileStoreID = job.fileStore.writeGlobalFile(tempFile)
+
+            # Make sure it returned a valid and correct FileID with the right size
+            assert isinstance(fileStoreID, FileID)
+            assert fileStoreID.size == len(testString.encode('utf-8'))
+
+            outputFileStoreIds.append(fileStoreID)
         else:
             #Use the writeGlobalFileStream method to write the file
             with job.fileStore.writeGlobalFileStream() as (fH, fileStoreID):
                 fH.write(testString.encode('utf-8'))
                 outputFileStoreIds.append(fileStoreID)
 
+
+            #Make sure it returned a valid and correct FileID with the right size
+            assert isinstance(fileStoreID, FileID)
+            assert fileStoreID.size == len(testString.encode('utf-8'))
+
     if chainLength > 0:
         #Make a child that will read these files and check it gets the same results
         job.addChildJobFn(fileTestJob, outputFileStoreIds, testStrings, chainLength-1)
