summaryrefslogtreecommitdiff
path: root/filesystems/py-fs
diff options
context:
space:
mode:
Diffstat (limited to 'filesystems/py-fs')
-rw-r--r--filesystems/py-fs/Makefile20
-rw-r--r--filesystems/py-fs/distinfo3
-rw-r--r--filesystems/py-fs/files/patch-2to35882
-rw-r--r--filesystems/py-fs/pkg-descr8
4 files changed, 5913 insertions, 0 deletions
diff --git a/filesystems/py-fs/Makefile b/filesystems/py-fs/Makefile
new file mode 100644
index 000000000000..f9b5d57677ca
--- /dev/null
+++ b/filesystems/py-fs/Makefile
@@ -0,0 +1,20 @@
+PORTNAME= fs
+PORTVERSION= 0.5.4
+PORTREVISION= 1
+CATEGORIES= filesystems devel python
+MASTER_SITES= PYPI
+PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX}
+
+MAINTAINER= douglas@douglasthrift.net
+COMMENT= Python filesystem abstraction
+WWW= https://pyfilesystem.org/
+
+LICENSE= BSD3CLAUSE
+LICENSE_FILE= ${WRKSRC}/LICENSE.txt
+
+USES= dos2unix python
+USE_PYTHON= autoplist concurrent distutils
+
+NO_ARCH= yes
+
+.include <bsd.port.mk>
diff --git a/filesystems/py-fs/distinfo b/filesystems/py-fs/distinfo
new file mode 100644
index 000000000000..1c22b48a1ee9
--- /dev/null
+++ b/filesystems/py-fs/distinfo
@@ -0,0 +1,3 @@
+TIMESTAMP = 1730905183
+SHA256 (fs-0.5.4.tar.gz) = ba2cca8773435a7c86059d57cb4b8ea30fda40f8610941f7822d1ce3ffd36197
+SIZE (fs-0.5.4.tar.gz) = 231333
diff --git a/filesystems/py-fs/files/patch-2to3 b/filesystems/py-fs/files/patch-2to3
new file mode 100644
index 000000000000..5e5874a2a21b
--- /dev/null
+++ b/filesystems/py-fs/files/patch-2to3
@@ -0,0 +1,5882 @@
+--- fs/appdirfs.py.orig 2015-04-12 17:24:29 UTC
++++ fs/appdirfs.py
+@@ -84,6 +84,6 @@ class UserLogFS(OSFS):
+
+ if __name__ == "__main__":
+ udfs = UserDataFS('exampleapp', appauthor='pyfs')
+- print udfs
++ print(udfs)
+ udfs2 = UserDataFS('exampleapp2', appauthor='pyfs', create=False)
+- print udfs2
++ print(udfs2)
+--- fs/appdirs.py.orig 2022-03-04 17:14:43 UTC
++++ fs/appdirs.py
+@@ -21,7 +21,7 @@ import os
+ PY3 = sys.version_info[0] == 3
+
+ if PY3:
+     unicode = str
+
+ class AppDirsError(Exception):
+ pass
+@@ -248,7 +248,7 @@ def _get_win_folder_from_registry(csidl_name):
+ registry for this guarantees us the correct answer for all CSIDL_*
+ names.
+ """
+- import _winreg
++ import winreg
+
+ shell_folder_name = {
+ "CSIDL_APPDATA": "AppData",
+@@ -256,9 +256,9 @@ def _get_win_folder_from_registry(csidl_name):
+ "CSIDL_LOCAL_APPDATA": "Local AppData",
+ }[csidl_name]
+
+- key = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,
++ key = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
+ r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders")
+- dir, type = _winreg.QueryValueEx(key, shell_folder_name)
++ dir, type = winreg.QueryValueEx(key, shell_folder_name)
+ return dir
+
+ def _get_win_folder_with_pywin32(csidl_name):
+@@ -268,7 +268,7 @@ def _get_win_folder_with_pywin32(csidl_name):
+ # not return unicode strings when there is unicode data in the
+ # path.
+ try:
+- dir = unicode(dir)
++ dir = str(dir)
+
+ # Downgrade to short path name if have highbit chars. See
+ # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
+@@ -337,9 +337,9 @@ if __name__ == "__main__":
+ print("-- app dirs (without optional 'version')")
+ dirs = AppDirs(appname, appauthor, version="1.0")
+ for prop in props:
+- print("%s: %s" % (prop, getattr(dirs, prop)))
++ print(("%s: %s" % (prop, getattr(dirs, prop))))
+
+ print("\n-- app dirs (with optional 'version')")
+ dirs = AppDirs(appname, appauthor)
+ for prop in props:
+- print("%s: %s" % (prop, getattr(dirs, prop)))
++ print(("%s: %s" % (prop, getattr(dirs, prop))))
+--- fs/base.py.orig 2022-03-04 17:14:43 UTC
++++ fs/base.py
+@@ -12,8 +12,8 @@ For more information regarding implementing a working
+
+ """
+
+-from __future__ import with_statement
+
++
+ __all__ = ['DummyLock',
+ 'silence_fserrors',
+ 'NullFile',
+@@ -109,7 +109,7 @@ class NullFile(object):
+ def flush(self):
+ pass
+
+- def next(self):
++ def __next__(self):
+ raise StopIteration
+
+ def readline(self, *args, **kwargs):
+@@ -900,7 +900,7 @@ class FS(object):
+ chunk_size=1024 * 64,
+ progress_callback=progress_callback,
+ finished_callback=finished_callback)
+- except Exception, e:
++ except Exception as e:
+ if error_callback is not None:
+ error_callback(e)
+ finally:
+@@ -1156,7 +1156,7 @@ class FS(object):
+ def _shutil_copyfile(cls, src_syspath, dst_syspath):
+ try:
+ shutil.copyfile(src_syspath, dst_syspath)
+- except IOError, e:
++ except IOError as e:
+ # shutil reports ENOENT when a parent directory is missing
+ if getattr(e, "errno", None) == errno.ENOENT:
+ if not os.path.exists(dirname(dst_syspath)):
+--- fs/browsewin.py.orig 2022-03-04 17:14:43 UTC
++++ fs/browsewin.py
+@@ -24,7 +24,7 @@ class InfoFrame(wx.Frame):
+
+ self.SetTitle("FS Object info - %s (%s)" % (path, desc))
+
+- keys = info.keys()
++ keys = list(info.keys())
+ keys.sort()
+
+ self.list_ctrl = wx.ListCtrl(self, -1, style=wx.LC_REPORT|wx.SUNKEN_BORDER)
+@@ -36,7 +36,7 @@ class InfoFrame(wx.Frame):
+ self.list_ctrl.SetColumnWidth(1, 300)
+
+ for key in sorted(keys, key=lambda k:k.lower()):
+- self.list_ctrl.Append((key, unicode(info.get(key))))
++ self.list_ctrl.Append((key, str(info.get(key))))
+
+ self.Center()
+
+@@ -50,7 +50,7 @@ class BrowseFrame(wx.Frame):
+
+ self.fs = fs
+ self.hide_dotfiles = hide_dotfiles
+- self.SetTitle("FS Browser - " + unicode(fs))
++ self.SetTitle("FS Browser - " + str(fs))
+
+ self.tree = wx.gizmos.TreeListCtrl(self, -1, style=wx.TR_DEFAULT_STYLE | wx.TR_HIDE_ROOT)
+
+@@ -105,7 +105,7 @@ class BrowseFrame(wx.Frame):
+ try:
+ paths = ( [(True, p) for p in self.fs.listdir(path, absolute=True, dirs_only=True)] +
+ [(False, p) for p in self.fs.listdir(path, absolute=True, files_only=True)] )
+- except FSError, e:
++ except FSError as e:
+ msg = "Failed to get directory listing for %s\n\nThe following error was reported:\n\n%s" % (path, e)
+ wx.MessageDialog(self, msg, "Error listing directory", wx.OK).ShowModal()
+ paths = []
+@@ -194,6 +194,6 @@ def browse(fs, hide_dotfiles=False):
+
+
+ if __name__ == "__main__":
+- from osfs import OSFS
++ from .osfs import OSFS
+ home_fs = OSFS("~/")
+ browse(home_fs, True)
+--- fs/commands/fscp.py.orig 2022-03-04 17:14:43 UTC
++++ fs/commands/fscp.py
+@@ -3,7 +3,7 @@ from fs.utils import copyfile, copyfile_non_atomic
+ from fs.path import pathjoin, iswildcard
+ from fs.commands.runner import Command
+ import sys
+-import Queue as queue
++import queue
+ import time
+ import threading
+
+@@ -31,7 +31,7 @@ class FileOpThread(threading.Thread):
+ self.dest_fs.makedir(path, recursive=True, allow_recreate=True)
+ else:
+ self.action(fs, path, self.dest_fs, dest_path, overwrite=True)
+- except Exception, e:
++ except Exception as e:
+ self.on_error(e)
+ self.queue.task_done()
+ break
+@@ -147,7 +147,7 @@ Copy SOURCE to DESTINATION"""
+ file_queue,
+ self.on_done,
+ self.on_error)
+- for i in xrange(options.threads)]
++ for i in range(options.threads)]
+
+ for thread in threads:
+ thread.start()
+@@ -188,7 +188,7 @@ Copy SOURCE to DESTINATION"""
+
+ if self.action_errors:
+ for error in self.action_errors:
+- self.error(self.wrap_error(unicode(error)) + '\n')
++ self.error(self.wrap_error(str(error)) + '\n')
+ sys.stdout.flush()
+ else:
+ if complete and options.progress:
+@@ -204,9 +204,9 @@ Copy SOURCE to DESTINATION"""
+ try:
+ if self.options.verbose:
+ if path_type == self.DIR:
+- print "mkdir %s" % dst_fs.desc(dst_path)
++ print("mkdir %s" % dst_fs.desc(dst_path))
+ else:
+- print "%s -> %s" % (src_fs.desc(src_path), dst_fs.desc(dst_path))
++ print("%s -> %s" % (src_fs.desc(src_path), dst_fs.desc(dst_path)))
+ elif self.options.progress:
+ self.done_files += 1
+ sys.stdout.write(self.progress_bar(self.total_files, self.done_files, self.get_verb()))
+--- fs/commands/fsinfo.py.orig 2022-03-04 17:14:43 UTC
++++ fs/commands/fsinfo.py
+@@ -31,12 +31,12 @@ Display information regarding an FS resource"""
+ return val
+
+ def make_printable(text):
+- if not isinstance(text, basestring):
++ if not isinstance(text, str):
+ try:
+ text = str(text)
+ except:
+ try:
+- text = unicode(text)
++ text = str(text)
+ except:
+ text = repr(text)
+ return text
+@@ -48,16 +48,16 @@ Display information regarding an FS resource"""
+ dirs_only=options.dirsonly):
+ if not options.omit:
+ if options.simple:
+- file_line = u'%s\n' % self.wrap_filename(path)
++ file_line = '%s\n' % self.wrap_filename(path)
+ else:
+- file_line = u'[%s] %s\n' % (self.wrap_filename(path), self.wrap_faded(fs.desc(path)))
++ file_line = '[%s] %s\n' % (self.wrap_filename(path), self.wrap_faded(fs.desc(path)))
+ self.output(file_line)
+ info = fs.getinfo(path)
+
+- for k, v in info.items():
++ for k, v in list(info.items()):
+ if k.startswith('_'):
+ del info[k]
+- elif not isinstance(v, (basestring, int, long, float, bool, datetime)):
++ elif not isinstance(v, (str, int, float, bool, datetime)):
+ del info[k]
+
+ if keys:
+--- fs/commands/fsls.py.orig 2022-03-04 17:14:43 UTC
++++ fs/commands/fsls.py
+@@ -37,7 +37,7 @@ List contents of [PATH]"""
+ output = self.output
+
+ if not args:
+- args = [u'.']
++ args = ['.']
+
+ dir_paths = []
+ file_paths = []
+@@ -75,13 +75,13 @@ List contents of [PATH]"""
+
+ if options.syspath:
+ # Path without a syspath, just won't be displayed
+- dir_paths = filter(None, [fs.getsyspath(path, allow_none=True) for path in dir_paths])
+- file_paths = filter(None, [fs.getsyspath(path, allow_none=True) for path in file_paths])
++ dir_paths = [_f for _f in [fs.getsyspath(path, allow_none=True) for path in dir_paths] if _f]
++ file_paths = [_f for _f in [fs.getsyspath(path, allow_none=True) for path in file_paths] if _f]
+
+ if options.url:
+ # Path without a syspath, just won't be displayed
+- dir_paths = filter(None, [fs.getpathurl(path, allow_none=True) for path in dir_paths])
+- file_paths = filter(None, [fs.getpathurl(path, allow_none=True) for path in file_paths])
++ dir_paths = [_f for _f in [fs.getpathurl(path, allow_none=True) for path in dir_paths] if _f]
++ file_paths = [_f for _f in [fs.getpathurl(path, allow_none=True) for path in file_paths] if _f]
+
+ dirs = frozenset(dir_paths)
+ paths = sorted(dir_paths + file_paths, key=lambda p: p.lower())
+@@ -95,7 +95,7 @@ List contents of [PATH]"""
+ def columnize(paths, num_columns):
+
+ col_height = (len(paths) + num_columns - 1) / num_columns
+-            col_height = (len(paths) + num_columns - 1) / num_columns
++            col_height = (len(paths) + num_columns - 1) // num_columns
++ columns = [[] for _ in range(num_columns)]
+ col_no = 0
+ col_pos = 0
+ for path in paths:
+@@ -128,11 +128,11 @@ List contents of [PATH]"""
+
+ def condense_columns(columns):
+ max_column_height = max([len(col) for col in columns])
+- lines = [[] for _ in xrange(max_column_height)]
++ lines = [[] for _ in range(max_column_height)]
+ for column in columns:
+ for line, path in zip(lines, column):
+ line.append(path)
+- return '\n'.join(u' '.join(line) for line in lines)
++ return '\n'.join(' '.join(line) for line in lines)
+
+ if options.long:
+ for path in paths:
+@@ -151,7 +151,7 @@ List contents of [PATH]"""
+ while num_cols:
+ col_height = (num_paths + num_cols - 1) // num_cols
+ line_width = 0
+- for col_no in xrange(num_cols):
++ for col_no in range(num_cols):
+ try:
+ col_width = max(path_widths[col_no * col_height: (col_no + 1) * col_height])
+ except ValueError:
+--- fs/commands/fsserve.py.orig 2015-04-12 17:24:29 UTC
++++ fs/commands/fsserve.py
+@@ -82,7 +82,7 @@ Serves the contents of PATH with one of a number of me
+ try:
+ self.output("Starting sftp server on %s:%i\n" % (options.addr, port), verbose=True)
+ server.serve_forever()
+- except Exception, e:
++ except Exception as e:
+ pass
+ finally:
+ server.server_close()
+@@ -90,7 +90,7 @@ Serves the contents of PATH with one of a number of me
+ else:
+ self.error("Server type '%s' not recognised\n" % options.type)
+
+- except IOError, e:
++ except IOError as e:
+ if e.errno == errno.EACCES:
+ self.error('Permission denied\n')
+ return 1
+--- fs/commands/fstree.py.orig 2022-03-04 17:14:43 UTC
++++ fs/commands/fstree.py
+@@ -34,7 +34,7 @@ Recursively display the contents of PATH in an ascii t
+
+ for fs, path, is_dir in self.get_resources(args, single=True):
+ if not is_dir:
+- self.error(u"'%s' is not a dir\n" % path)
++ self.error("'%s' is not a dir\n" % path)
+ return 1
+ fs.cache_hint(True)
+ if options.gui:
+--- fs/commands/runner.py.orig 2022-03-04 17:14:43 UTC
++++ fs/commands/runner.py
+@@ -68,7 +68,7 @@ else:
+
+
+ def _unicode(text):
+- if not isinstance(text, unicode):
++ if not isinstance(text, str):
+ return text.decode('ascii', 'replace')
+ return text
+
+@@ -128,17 +128,17 @@ class Command(object):
+ text = _unicode(text)
+ if not self.terminal_colors:
+ return text
+- return u'\x1b[2m%s\x1b[0m' % text
++ return '\x1b[2m%s\x1b[0m' % text
+
+ def wrap_link(self, text):
+ if not self.terminal_colors:
+ return text
+- return u'\x1b[1;33m%s\x1b[0m' % text
++ return '\x1b[1;33m%s\x1b[0m' % text
+
+ def wrap_strong(self, text):
+ if not self.terminal_colors:
+ return text
+- return u'\x1b[1m%s\x1b[0m' % text
++ return '\x1b[1m%s\x1b[0m' % text
+
+ def wrap_table_header(self, name):
+ if not self.terminal_colors:
+@@ -215,10 +215,10 @@ class Command(object):
+ return resources
+
+ def ask(self, msg):
+- return raw_input('%s: %s ' % (self.name, msg))
++ return input('%s: %s ' % (self.name, msg))
+
+ def text_encode(self, text):
+- if not isinstance(text, unicode):
++ if not isinstance(text, str):
+ text = text.decode('ascii', 'replace')
+ text = text.encode(self.encoding, 'replace')
+ return text
+@@ -226,7 +226,7 @@ class Command(object):
+ def output(self, msgs, verbose=False):
+ if verbose and not self.options.verbose:
+ return
+- if isinstance(msgs, basestring):
++ if isinstance(msgs, str):
+ msgs = (msgs,)
+ for msg in msgs:
+ self.output_file.write(self.text_encode(msg))
+@@ -276,7 +276,7 @@ class Command(object):
+
+ opener_table = []
+
+- for fs_opener in opener.openers.itervalues():
++ for fs_opener in opener.openers.values():
+ names = fs_opener.names
+ desc = getattr(fs_opener, 'desc', '')
+ opener_table.append((names, desc))
+@@ -346,12 +346,12 @@ class Command(object):
+ opener.add(new_opener)
+
+ if not six.PY3:
+- args = [unicode(arg, sys.getfilesystemencoding()) for arg in args]
++ args = [str(arg, sys.getfilesystemencoding()) for arg in args]
+ self.verbose = options.verbose
+ try:
+ return self.do_run(options, args) or 0
+- except FSError, e:
+- self.error(self.wrap_error(unicode(e)) + '\n')
++ except FSError as e:
++ self.error(self.wrap_error(str(e)) + '\n')
+ if options.debug:
+ raise
+ return 1
+@@ -361,8 +361,8 @@ class Command(object):
+ return 0
+ except SystemExit:
+ return 0
+- except Exception, e:
+- self.error(self.wrap_error('Error - %s\n' % unicode(e)))
++ except Exception as e:
++ self.error(self.wrap_error('Error - %s\n' % str(e)))
+ if options.debug:
+ raise
+ return 1
+--- fs/contrib/archivefs.py.orig 2022-03-04 17:14:43 UTC
++++ fs/contrib/archivefs.py
+@@ -62,7 +62,7 @@ class ArchiveFS(FS):
+ :param thread_synchronize: set to True (default) to enable thread-safety
+ """
+ super(ArchiveFS, self).__init__(thread_synchronize=thread_synchronize)
+- if isinstance(f, basestring):
++ if isinstance(f, str):
+ self.fileobj = None
+ self.root_path = f
+ else:
+@@ -83,7 +83,7 @@ class ArchiveFS(FS):
+ return "<ArchiveFS: %s>" % self.root_path
+
+ def __unicode__(self):
+- return u"<ArchiveFS: %s>" % self.root_path
++ return "<ArchiveFS: %s>" % self.root_path
+
+ def getmeta(self, meta_name, default=NoDefaultMeta):
+ if meta_name == 'read_only':
+@@ -446,7 +446,7 @@ class ArchiveMountFS(mountfs.MountFS):
+ else:
+ listing = self.listdir(path, *args, **kwargs)
+ if dirs_only:
+- listing = filter(isdir, listing)
++ listing = list(filter(isdir, listing))
+ return listing
+
+ if wildcard is None:
+--- fs/contrib/bigfs/__init__.py.orig 2022-03-04 17:14:43 UTC
++++ fs/contrib/bigfs/__init__.py
+@@ -149,7 +149,7 @@ class _ExceptionProxy(object):
+ def __setattr__(self, name, value):
+ raise ValueError("File has been closed")
+
+- def __nonzero__(self):
++ def __bool__(self):
+ return False
+
+
+@@ -193,7 +193,7 @@ class BigFS(FS):
+ return "<BigFS: %s>" % self.big_path
+
+ def __unicode__(self):
+- return unicode(self.__str__())
++ return str(self.__str__())
+
+
+ def _parse_resource_list(self, g):
+--- fs/contrib/bigfs/subrangefile.py.orig 2022-03-04 17:14:43 UTC
++++ fs/contrib/bigfs/subrangefile.py
+@@ -33,7 +33,7 @@ class SubrangeFile:
+ return "<SubrangeFile: %s@%d size=%d>" % (self.name, self.startOffset, self.fileSize)
+
+ def __unicode__(self):
+- return unicode(self.__str__())
++ return str(self.__str__())
+
+ def size(self):
+ return self.fileSize
+--- fs/contrib/davfs/__init__.py.orig 2015-04-12 17:24:29 UTC
++++ fs/contrib/davfs/__init__.py
+@@ -16,21 +16,21 @@ Requires the dexml module:
+ # Copyright (c) 2009-2010, Cloud Matrix Pty. Ltd.
+ # All rights reserved; available under the terms of the MIT License.
+
+-from __future__ import with_statement
+
++
+ import os
+ import sys
+-import httplib
++import http.client
+ import socket
+-from urlparse import urlparse
++from urllib.parse import urlparse
+ import stat as statinfo
+-from urllib import quote as urlquote
+-from urllib import unquote as urlunquote
++from urllib.parse import quote as urlquote
++from urllib.parse import unquote as urlunquote
+ import base64
+ import re
+ import time
+ import datetime
+-import cookielib
++import http.cookiejar
+ import fnmatch
+ import xml.dom.pulldom
+ import threading
+@@ -78,8 +78,8 @@ class DAVFS(FS):
+ """
+
+ connection_classes = {
+- "http": httplib.HTTPConnection,
+- "https": httplib.HTTPSConnection,
++ "http": http.client.HTTPConnection,
++ "https": http.client.HTTPSConnection,
+ }
+
+ _DEFAULT_PORT_NUMBERS = {
+@@ -116,7 +116,7 @@ class DAVFS(FS):
+ self._connections = []
+ self._free_connections = {}
+ self._connection_lock = threading.Lock()
+- self._cookiejar = cookielib.CookieJar()
++ self._cookiejar = http.cookiejar.CookieJar()
+ super(DAVFS,self).__init__(thread_synchronize=thread_synchronize)
+ # Check that the server speaks WebDAV, and normalize the URL
+ # after any redirects have been followed.
+@@ -221,14 +221,14 @@ class DAVFS(FS):
+ self._free_connections = {}
+ self._connection_lock = threading.Lock()
+ self._url_p = urlparse(self.url)
+- self._cookiejar = cookielib.CookieJar()
++ self._cookiejar = http.cookiejar.CookieJar()
+
+ def getpathurl(self, path, allow_none=False):
+ """Convert a client-side path into a server-side URL."""
+ path = relpath(normpath(path))
+ if path.endswith("/"):
+ path = path[:-1]
+- if isinstance(path,unicode):
++ if isinstance(path,str):
+ path = path.encode("utf8")
+ return self.url + urlquote(path)
+
+@@ -291,7 +291,7 @@ class DAVFS(FS):
+ """Perform a single HTTP request, without any error handling."""
+ if self.closed:
+ raise RemoteConnectionError("",msg="FS is closed")
+- if isinstance(url,basestring):
++ if isinstance(url,str):
+ url = urlparse(url)
+ if self.credentials is not None:
+ username = self.credentials.get("username","")
+@@ -310,7 +310,7 @@ class DAVFS(FS):
+ if hasattr(body,"md5"):
+ md5 = body.md5.decode("hex").encode("base64")
+ con.putheader("Content-MD5",md5)
+- for hdr,val in headers.iteritems():
++ for hdr,val in headers.items():
+ con.putheader(hdr,val)
+ self._cookiejar.add_cookie_header(FakeReq(con,url.scheme,url.path))
+ con.endheaders()
+@@ -332,7 +332,7 @@ class DAVFS(FS):
+ self._give_connection(url,con)
+ resp.close = new_close
+ return resp
+- except socket.error, e:
++ except socket.error as e:
+ if not fresh:
+ return self._raw_request(url,method,body,headers,num_tries)
+ if e.args[0] in _RETRYABLE_ERRORS:
+@@ -479,7 +479,7 @@ class DAVFS(FS):
+ if not entry_ok:
+ continue
+ if wildcard is not None:
+- if isinstance(wildcard,basestring):
++ if isinstance(wildcard,str):
+ if not fnmatch.fnmatch(nm,wildcard):
+ continue
+ else:
+@@ -530,7 +530,7 @@ class DAVFS(FS):
+ if not entry_ok:
+ continue
+ if wildcard is not None:
+- if isinstance(wildcard,basestring):
++ if isinstance(wildcard,str):
+ if not fnmatch.fnmatch(nm,wildcard):
+ continue
+ else:
+@@ -610,7 +610,7 @@ class DAVFS(FS):
+ if self._isurl(path,res.href):
+ info.update(self._info_from_propfind(res))
+ if "st_mode" not in info:
+- info["st_mode"] = 0700 | statinfo.S_IFREG
++ info["st_mode"] = 0o700 | statinfo.S_IFREG
+ return info
+ finally:
+ response.close()
+@@ -647,7 +647,7 @@ class DAVFS(FS):
+ # TODO: should check for status of the propfind first...
+ # check for directory indicator
+ if findElements("DAV:","collection"):
+- info["st_mode"] = 0700 | statinfo.S_IFDIR
++ info["st_mode"] = 0o700 | statinfo.S_IFDIR
+ # check for content length
+ cl = findElements("DAV:","getcontentlength")
+ if cl:
+@@ -674,7 +674,7 @@ class DAVFS(FS):
+ if etag:
+ info["etag"] = etag
+ if "st_mode" not in info:
+- info["st_mode"] = 0700 | statinfo.S_IFREG
++ info["st_mode"] = 0o700 | statinfo.S_IFREG
+ return info
+
+
+--- fs/contrib/davfs/util.py.orig 2022-03-04 17:14:43 UTC
++++ fs/contrib/davfs/util.py
+@@ -8,7 +8,7 @@
+
+ import os
+ import re
+-import cookielib
++import http.cookiejar
+
+
+ def get_fileno(file):
+@@ -130,7 +130,7 @@ class FakeResp:
+ # is a tweaked version of the cookielib function of the same name.
+ #
+ _test_cookie = "sessionid=e9c9b002befa93bd865ce155270307ef; Domain=.cloud.me; expires=Wed, 10-Feb-2010 03:27:20 GMT; httponly; Max-Age=1209600; Path=/, sessionid_https=None; Domain=.cloud.me; expires=Wed, 10-Feb-2010 03:27:20 GMT; httponly; Max-Age=1209600; Path=/; secure"
+-if len(cookielib.parse_ns_headers([_test_cookie])) != 2:
++if len(http.cookiejar.parse_ns_headers([_test_cookie])) != 2:
+ def parse_ns_headers(ns_headers):
+ """Improved parser for netscape-style cookies.
+
+@@ -170,13 +170,13 @@ if len(cookielib.parse_ns_headers([_test_cookie])) !=
+ # convert expires date to seconds since epoch
+ if v.startswith('"'): v = v[1:]
+ if v.endswith('"'): v = v[:-1]
+- v = cookielib.http2time(v) # None if invalid
++ v = http.cookiejar.http2time(v) # None if invalid
+ pairs.append((k, v))
+ if pairs:
+ if not version_set:
+ pairs.append(("version", "0"))
+ result.append(pairs)
+ return result
+- cookielib.parse_ns_headers = parse_ns_headers
+- assert len(cookielib.parse_ns_headers([_test_cookie])) == 2
++ http.cookiejar.parse_ns_headers = parse_ns_headers
++ assert len(http.cookiejar.parse_ns_headers([_test_cookie])) == 2
+
+--- fs/contrib/davfs/xmlobj.py.orig 2022-03-04 17:14:43 UTC
++++ fs/contrib/davfs/xmlobj.py
+@@ -9,9 +9,9 @@ of dexml.Model subclasses.
+
+ """
+
+-from urlparse import urlparse, urlunparse
++from urllib.parse import urlparse, urlunparse
+
+-from httplib import responses as STATUS_CODE_TEXT
++from http.client import responses as STATUS_CODE_TEXT
+ STATUS_CODE_TEXT[207] = "Multi-Status"
+
+ import dexml
+@@ -86,7 +86,7 @@ class StatusField(fields.Value):
+ return val
+
+ def __set__(self,instance,value):
+- if isinstance(value,basestring):
++ if isinstance(value,str):
+ # sanity check it
+ bits = value.split(" ")
+ if len(bits) < 3 or bits[0] != "HTTP/1.1":
+--- fs/contrib/sqlitefs.py.orig 2015-04-12 17:24:29 UTC
++++ fs/contrib/sqlitefs.py
+@@ -24,7 +24,7 @@ def fetchone(cursor):
+ '''
+ row = None
+ try:
+- row = cursor.next()
++ row = next(cursor)
+ except:
+ pass
+ return(row)
+@@ -62,7 +62,7 @@ class SqliteFsFileBase(object):
+ __repr__ = __str__
+
+ def __unicode__(self):
+- return u"<SqliteFS File in %s %s>" % (self.fs, self.path)
++ return "<SqliteFS File in %s %s>" % (self.fs, self.path)
+
+ def __del__(self):
+ if not self.closed:
+@@ -74,7 +74,7 @@ class SqliteFsFileBase(object):
+ def __iter__(self):
+ raise OperationFailedError('__iter__', self.path)
+
+- def next(self):
++ def __next__(self):
+ raise OperationFailedError('next', self.path)
+
+ def readline(self, *args, **kwargs):
+@@ -139,8 +139,8 @@ class SqliteReadableFile(SqliteFsFileBase):
+ def __iter__(self):
+ return iter(self.real_stream)
+
+- def next(self):
+- return self.real_stream.next()
++ def __next__(self):
++ return next(self.real_stream)
+
+ def readline(self, *args, **kwargs):
+ return self.real_stream.readline(*args, **kwargs)
+@@ -438,7 +438,7 @@ class SqliteFS(FS):
+ get the directory information dictionary.
+ '''
+ info = dict()
+- info['st_mode'] = 0755
++ info['st_mode'] = 0o755
+ return info
+
+ def _get_file_info(self, path):
+@@ -460,7 +460,7 @@ class SqliteFS(FS):
+ info['created'] = row[2]
+ info['last_modified'] = row[3]
+ info['last_accessed'] = row[4]
+- info['st_mode'] = 0666
++ info['st_mode'] = 0o666
+ return(info)
+
+ def _isfile(self,path):
+@@ -551,7 +551,7 @@ class SqliteFS(FS):
+ pass
+
+ if( absolute == False):
+- pathlist = map(lambda dpath:frombase(path,dpath), pathlist)
++ pathlist = [frombase(path,dpath) for dpath in pathlist]
+
+ return(pathlist)
+
+--- fs/contrib/tahoelafs/__init__.py.orig 2022-03-04 17:14:43 UTC
++++ fs/contrib/tahoelafs/__init__.py
+@@ -70,8 +70,8 @@ from fs import _thread_synchronize_default, SEEK_END
+ from fs.remote import CacheFSMixin, RemoteFileBuffer
+ from fs.base import fnmatch, NoDefaultMeta
+
+-from util import TahoeUtil
+-from connection import Connection
++from .util import TahoeUtil
++from .connection import Connection
+
+ from six import b
+
+@@ -240,7 +240,7 @@ class _TahoeLAFS(FS):
+ continue
+
+ if wildcard is not None:
+- if isinstance(wildcard,basestring):
++ if isinstance(wildcard,str):
+ if not fnmatch.fnmatch(item['name'], wildcard):
+ continue
+ else:
+@@ -269,7 +269,7 @@ class _TahoeLAFS(FS):
+
+ try:
+ self.tahoeutil.unlink(self.dircap, path)
+- except Exception, e:
++ except Exception as e:
+ raise errors.ResourceInvalidError(path)
+
+ @_fix_path
+@@ -341,8 +341,8 @@ class _TahoeLAFS(FS):
+
+ def _log(self, level, message):
+ if not logger.isEnabledFor(level): return
+- logger.log(level, u'(%d) %s' % (id(self),
+- unicode(message).encode('ASCII', 'replace')))
++ logger.log(level, '(%d) %s' % (id(self),
++ str(message).encode('ASCII', 'replace')))
+
+ @_fix_path
+ def getpathurl(self, path, allow_none=False, webapi=None):
+@@ -353,11 +353,11 @@ class _TahoeLAFS(FS):
+ webapi = self.connection.webapi
+ self._log(DEBUG, "Retrieving URL for %s over %s" % (path, webapi))
+ path = self.tahoeutil.fixwinpath(path, False)
+- return u"%s/uri/%s%s" % (webapi, self.dircap, path)
++ return "%s/uri/%s%s" % (webapi, self.dircap, path)
+
+ @_fix_path
+ def getrange(self, path, offset, length=None):
+- return self.connection.get(u'/uri/%s%s' % (self.dircap, path),
++ return self.connection.get('/uri/%s%s' % (self.dircap, path),
+ offset=offset, length=length)
+
+ @_fix_path
+@@ -379,10 +379,10 @@ class _TahoeLAFS(FS):
+ file.seek(0)
+
+ if size > self.largefilesize:
+- self.connection.put(u'/uri/%s%s' % (self.dircap, path),
++ self.connection.put('/uri/%s%s' % (self.dircap, path),
+ "PyFilesystem.TahoeLAFS: Upload started, final size %d" % size)
+
+- self.connection.put(u'/uri/%s%s' % (self.dircap, path), file, size=size)
++ self.connection.put('/uri/%s%s' % (self.dircap, path), file, size=size)
+
+ @_fix_path
+ def getinfo(self, path):
+--- fs/contrib/tahoelafs/connection.py.orig 2015-04-12 17:24:29 UTC
++++ fs/contrib/tahoelafs/connection.py
+@@ -10,17 +10,19 @@ if python3:
+ from urllib.parse import urlencode, pathname2url, quote
+ from urllib.request import Request, urlopen
+ else:
+- from urllib import urlencode, pathname2url
+- from urllib2 import Request, urlopen, quote
++ from urllib.parse import urlencode
++ from urllib.request import pathname2url
++ from urllib.request import Request, urlopen
++ from urllib.parse import quote
+
+ class PutRequest(Request):
+ def __init__(self, *args, **kwargs):
+- self.get_method = lambda: u'PUT'
++ self.get_method = lambda: 'PUT'
+ Request.__init__(self, *args, **kwargs)
+
+ class DeleteRequest(Request):
+ def __init__(self, *args, **kwargs):
+- self.get_method = lambda: u'DELETE'
++ self.get_method = lambda: 'DELETE'
+ Request.__init__(self, *args, **kwargs)
+
+ class Connection:
+@@ -32,7 +34,7 @@ class Connection:
+ '''
+ Retrieve length of string or file object and prepare HTTP headers.
+ '''
+- if isinstance(f, basestring):
++ if isinstance(f, str):
+ # Just set up content length
+ size = len(f)
+ elif getattr(f, 'read', None):
+@@ -50,20 +52,20 @@ class Connection:
+
+ def _urlencode(self, data):
+ _data = {}
+- for k, v in data.items():
++ for k, v in list(data.items()):
+ _data[k.encode('utf-8')] = v.encode('utf-8')
+ return urlencode(_data)
+
+ def _quotepath(self, path, params={}):
+ q = quote(path.encode('utf-8'), safe='/')
+ if params:
+- return u"%s?%s" % (q, self._urlencode(params))
++ return "%s?%s" % (q, self._urlencode(params))
+ return q
+
+ def _urlopen(self, req):
+ try:
+ return urlopen(req)
+- except Exception, e:
++ except Exception as e:
+ if not getattr(e, 'getcode', None):
+ raise errors.RemoteConnectionError(str(e))
+ code = e.getcode()
+@@ -85,7 +87,7 @@ class Connection:
+ data = self._urlencode(data)
+ path = self._quotepath(path)
+ if data:
+- path = u'?'.join([path, data])
++ path = '?'.join([path, data])
+
+ headers = {}
+ headers.update(self.headers)
+--- fs/contrib/tahoelafs/test_tahoelafs.py.orig 2022-03-04 17:14:43 UTC
++++ fs/contrib/tahoelafs/test_tahoelafs.py
+@@ -35,7 +35,7 @@ class TestTahoeLAFS(unittest.TestCase,FSTestCases):#,T
+
+ def test_dircap(self):
+ # Is dircap in correct format?
+- self.assert_(self.dircap.startswith('URI:DIR2:') and len(self.dircap) > 50)
++ self.assertTrue(self.dircap.startswith('URI:DIR2:') and len(self.dircap) > 50)
+
+ def test_concurrent_copydir(self):
+ # makedir() on TahoeLAFS is currently not atomic
+--- fs/contrib/tahoelafs/util.py.orig 2022-03-04 17:14:43 UTC
++++ fs/contrib/tahoelafs/util.py
+@@ -19,7 +19,7 @@ except ImportError:
+ try:
+ import json
+ except ImportError:
+- print "simplejson (http://pypi.python.org/pypi/simplejson/) required"
++ print("simplejson (http://pypi.python.org/pypi/simplejson/) required")
+ raise
+
+ from .connection import Connection
+@@ -29,22 +29,22 @@ python3 = int(platform.python_version_tuple()[0]) > 2
+ if python3:
+ from urllib.error import HTTPError
+ else:
+- from urllib2 import HTTPError
++ from urllib.error import HTTPError
+
+ class TahoeUtil:
+ def __init__(self, webapi):
+ self.connection = Connection(webapi)
+
+ def createdircap(self):
+- return self.connection.post(u'/uri', params={u't': u'mkdir'}).read()
++ return self.connection.post('/uri', params={'t': 'mkdir'}).read()
+
+ def unlink(self, dircap, path=None):
+ path = self.fixwinpath(path, False)
+- self.connection.delete(u'/uri/%s%s' % (dircap, path))
++ self.connection.delete('/uri/%s%s' % (dircap, path))
+
+ def info(self, dircap, path):
+ path = self.fixwinpath(path, False)
+- meta = json.load(self.connection.get(u'/uri/%s%s' % (dircap, path), {u't': u'json'}))
++ meta = json.load(self.connection.get('/uri/%s%s' % (dircap, path), {'t': 'json'}))
+ return self._info(path, meta)
+
+ def fixwinpath(self, path, direction=True):
+@@ -74,7 +74,7 @@ class TahoeUtil:
+ if type == 'unknown':
+ raise errors.ResourceNotFoundError(path)
+
+- info = {'name': unicode(self.fixwinpath(path, True)),
++ info = {'name': str(self.fixwinpath(path, True)),
+ 'type': type,
+ 'size': data.get('size', 0),
+ 'ctime': None,
+@@ -83,22 +83,22 @@ class TahoeUtil:
+ info['ctime'] = data['metadata'].get('ctime')
+
+ if info['type'] == 'dirnode':
+- info['st_mode'] = 0777 | statinfo.S_IFDIR
++ info['st_mode'] = 0o777 | statinfo.S_IFDIR
+ else:
+- info['st_mode'] = 0644
++ info['st_mode'] = 0o644
+
+ return info
+
+ def list(self, dircap, path=None):
+ path = self.fixwinpath(path, False)
+
+- data = json.load(self.connection.get(u'/uri/%s%s' % (dircap, path), {u't': u'json'}))
++ data = json.load(self.connection.get('/uri/%s%s' % (dircap, path), {'t': 'json'}))
+
+ if len(data) < 2 or data[0] != 'dirnode':
+ raise errors.ResourceInvalidError('Metadata in unknown format!')
+
+ data = data[1]['children']
+- for i in data.keys():
++ for i in list(data.keys()):
+ x = self._info(i, data[i])
+ yield x
+
+@@ -106,7 +106,7 @@ class TahoeUtil:
+ path = self.fixwinpath(path, False)
+ path = pathsplit(path)
+
+- self.connection.post(u"/uri/%s%s" % (dircap, path[0]), data={u't': u'mkdir', u'name': path[1]})
++ self.connection.post("/uri/%s%s" % (dircap, path[0]), data={'t': 'mkdir', 'name': path[1]})
+
+ def move(self, dircap, src, dst):
+ if src == '/' or dst == '/':
+@@ -120,8 +120,8 @@ class TahoeUtil:
+
+ if src_tuple[0] == dst_tuple[0]:
+ # Move inside one directory
+- self.connection.post(u"/uri/%s%s" % (dircap, src_tuple[0]), data={u't': u'rename',
+- u'from_name': src_tuple[1], u'to_name': dst_tuple[1]})
++ self.connection.post("/uri/%s%s" % (dircap, src_tuple[0]), data={'t': 'rename',
++ 'from_name': src_tuple[1], 'to_name': dst_tuple[1]})
+ return
+
+ # Move to different directory. Firstly create link on dst, then remove from src
+@@ -133,7 +133,7 @@ class TahoeUtil:
+ self.unlink(dircap, dst)
+
+ uri = self.info(dircap, src)['uri']
+- self.connection.put(u"/uri/%s%s" % (dircap, dst), data=uri, params={u't': u'uri'})
++ self.connection.put("/uri/%s%s" % (dircap, dst), data=uri, params={'t': 'uri'})
+ if uri != self.info(dircap, dst)['uri']:
+ raise errors.OperationFailedError('Move failed')
+
+--- fs/errors.py.orig 2015-04-12 17:24:29 UTC
++++ fs/errors.py
+@@ -57,19 +57,19 @@ class FSError(Exception):
+
+ def __str__(self):
+ keys = {}
+- for k,v in self.__dict__.iteritems():
+- if isinstance(v,unicode):
++ for k,v in self.__dict__.items():
++ if isinstance(v,str):
+ v = v.encode(sys.getfilesystemencoding())
+ keys[k] = v
+ return str(self.msg % keys)
+
+ def __unicode__(self):
+ keys = {}
+- for k,v in self.__dict__.iteritems():
++ for k,v in self.__dict__.items():
+ if isinstance(v, six.binary_type):
+ v = v.decode(sys.getfilesystemencoding(), 'replace')
+ keys[k] = v
+- return unicode(self.msg, encoding=sys.getfilesystemencoding(), errors='replace') % keys
++ return str(self.msg, encoding=sys.getfilesystemencoding(), errors='replace') % keys
+
+ def __reduce__(self):
+ return (self.__class__,(),self.__dict__.copy(),)
+@@ -217,33 +217,33 @@ def convert_fs_errors(func):
+ def wrapper(*args,**kwds):
+ try:
+ return func(*args,**kwds)
+- except ResourceNotFoundError, e:
++ except ResourceNotFoundError as e:
+ raise OSError(errno.ENOENT,str(e))
+- except ParentDirectoryMissingError, e:
++ except ParentDirectoryMissingError as e:
+ if sys.platform == "win32":
+ raise OSError(errno.ESRCH,str(e))
+ else:
+ raise OSError(errno.ENOENT,str(e))
+- except ResourceInvalidError, e:
++ except ResourceInvalidError as e:
+ raise OSError(errno.EINVAL,str(e))
+- except PermissionDeniedError, e:
++ except PermissionDeniedError as e:
+ raise OSError(errno.EACCES,str(e))
+- except ResourceLockedError, e:
++ except ResourceLockedError as e:
+ if sys.platform == "win32":
+ raise WindowsError(32,str(e))
+ else:
+ raise OSError(errno.EACCES,str(e))
+- except DirectoryNotEmptyError, e:
++ except DirectoryNotEmptyError as e:
+ raise OSError(errno.ENOTEMPTY,str(e))
+- except DestinationExistsError, e:
++ except DestinationExistsError as e:
+ raise OSError(errno.EEXIST,str(e))
+- except StorageSpaceError, e:
++ except StorageSpaceError as e:
+ raise OSError(errno.ENOSPC,str(e))
+- except RemoteConnectionError, e:
++ except RemoteConnectionError as e:
+ raise OSError(errno.ENETDOWN,str(e))
+- except UnsupportedError, e:
++ except UnsupportedError as e:
+ raise OSError(errno.ENOSYS,str(e))
+- except FSError, e:
++ except FSError as e:
+ raise OSError(errno.EFAULT,str(e))
+ return wrapper
+
+@@ -255,7 +255,7 @@ def convert_os_errors(func):
+ def wrapper(self,*args,**kwds):
+ try:
+ return func(self,*args,**kwds)
+- except (OSError,IOError), e:
++ except (OSError,IOError) as e:
+ (exc_type,exc_inst,tb) = sys.exc_info()
+ path = getattr(e,"filename",None)
+ if path and path[0] == "/" and hasattr(self,"root_path"):
+@@ -263,53 +263,53 @@ def convert_os_errors(func):
+ if isprefix(self.root_path,path):
+ path = path[len(self.root_path):]
+ if not hasattr(e,"errno") or not e.errno:
+- raise OperationFailedError(opname,details=e),None,tb
++ raise OperationFailedError(opname,details=e).with_traceback(tb)
+ if e.errno == errno.ENOENT:
+- raise ResourceNotFoundError(path,opname=opname,details=e),None,tb
++ raise ResourceNotFoundError(path,opname=opname,details=e).with_traceback(tb)
+ if e.errno == errno.EFAULT:
+ # This can happen when listdir a directory that is deleted by another thread
+ # Best to interpret it as a resource not found
+- raise ResourceNotFoundError(path,opname=opname,details=e),None,tb
++ raise ResourceNotFoundError(path,opname=opname,details=e).with_traceback(tb)
+ if e.errno == errno.ESRCH:
+- raise ResourceNotFoundError(path,opname=opname,details=e),None,tb
++ raise ResourceNotFoundError(path,opname=opname,details=e).with_traceback(tb)
+ if e.errno == errno.ENOTEMPTY:
+- raise DirectoryNotEmptyError(path,opname=opname,details=e),None,tb
++ raise DirectoryNotEmptyError(path,opname=opname,details=e).with_traceback(tb)
+ if e.errno == errno.EEXIST:
+- raise DestinationExistsError(path,opname=opname,details=e),None,tb
++ raise DestinationExistsError(path,opname=opname,details=e).with_traceback(tb)
+ if e.errno == 183: # some sort of win32 equivalent to EEXIST
+- raise DestinationExistsError(path,opname=opname,details=e),None,tb
++ raise DestinationExistsError(path,opname=opname,details=e).with_traceback(tb)
+ if e.errno == errno.ENOTDIR:
+- raise ResourceInvalidError(path,opname=opname,details=e),None,tb
++ raise ResourceInvalidError(path,opname=opname,details=e).with_traceback(tb)
+ if e.errno == errno.EISDIR:
+- raise ResourceInvalidError(path,opname=opname,details=e),None,tb
++ raise ResourceInvalidError(path,opname=opname,details=e).with_traceback(tb)
+ if e.errno == errno.EINVAL:
+- raise ResourceInvalidError(path,opname=opname,details=e),None,tb
++ raise ResourceInvalidError(path,opname=opname,details=e).with_traceback(tb)
+ if e.errno == errno.ENOSPC:
+- raise StorageSpaceError(opname,path=path,details=e),None,tb
++ raise StorageSpaceError(opname,path=path,details=e).with_traceback(tb)
+ if e.errno == errno.EPERM:
+- raise PermissionDeniedError(opname,path=path,details=e),None,tb
++ raise PermissionDeniedError(opname,path=path,details=e).with_traceback(tb)
+ if hasattr(errno,"ENONET") and e.errno == errno.ENONET:
+- raise RemoteConnectionError(opname,path=path,details=e),None,tb
++ raise RemoteConnectionError(opname,path=path,details=e).with_traceback(tb)
+ if e.errno == errno.ENETDOWN:
+- raise RemoteConnectionError(opname,path=path,details=e),None,tb
++ raise RemoteConnectionError(opname,path=path,details=e).with_traceback(tb)
+ if e.errno == errno.ECONNRESET:
+- raise RemoteConnectionError(opname,path=path,details=e),None,tb
++ raise RemoteConnectionError(opname,path=path,details=e).with_traceback(tb)
+ if e.errno == errno.EACCES:
+ if sys.platform == "win32":
+ if e.args[0] and e.args[0] == 32:
+- raise ResourceLockedError(path,opname=opname,details=e),None,tb
+- raise PermissionDeniedError(opname,details=e),None,tb
++ raise ResourceLockedError(path,opname=opname,details=e).with_traceback(tb)
++ raise PermissionDeniedError(opname,details=e).with_traceback(tb)
+ # Sometimes windows gives some random errors...
+ if sys.platform == "win32":
+ if e.errno in (13,):
+- raise ResourceInvalidError(path,opname=opname,details=e),None,tb
++ raise ResourceInvalidError(path,opname=opname,details=e).with_traceback(tb)
+ if e.errno == errno.ENAMETOOLONG:
+- raise PathError(path,details=e),None,tb
++ raise PathError(path,details=e).with_traceback(tb)
+ if e.errno == errno.EOPNOTSUPP:
+- raise UnsupportedError(opname,details=e),None,tb
++ raise UnsupportedError(opname,details=e).with_traceback(tb)
+ if e.errno == errno.ENOSYS:
+- raise UnsupportedError(opname,details=e),None,tb
+- raise OperationFailedError(opname,details=e),None,tb
++ raise UnsupportedError(opname,details=e).with_traceback(tb)
++ raise OperationFailedError(opname,details=e).with_traceback(tb)
+ return wrapper
+
+
+--- fs/expose/dokan/__init__.py.orig 2015-04-12 17:24:29 UTC
++++ fs/expose/dokan/__init__.py
+@@ -54,8 +54,8 @@ systems with Dokan installed.
+ # Copyright (c) 2009-2010, Cloud Matrix Pty. Ltd.
+ # All rights reserved; available under the terms of the MIT License.
+
+-from __future__ import with_statement
+
++
+ import sys
+
+ import os
+@@ -64,7 +64,7 @@ import errno
+ import time
+ import stat as statinfo
+ import subprocess
+-import cPickle
++import pickle
+ import datetime
+ import ctypes
+ from collections import deque
+@@ -76,7 +76,7 @@ from fs.local_functools import wraps
+ from fs.wrapfs import WrapFS
+
+ try:
+- import libdokan
++ from . import libdokan
+ except (NotImplementedError, EnvironmentError, ImportError, NameError,):
+ is_available = False
+ sys.modules.pop("fs.expose.dokan.libdokan", None)
+@@ -171,12 +171,12 @@ def handle_fs_errors(func):
+ def wrapper(*args,**kwds):
+ try:
+ res = func(*args,**kwds)
+- except OSError, e:
++ except OSError as e:
+ if e.errno:
+ res = -1 * _errno2syserrcode(e.errno)
+ else:
+ res = -1
+- except Exception, e:
++ except Exception as e:
+ raise
+ else:
+ if res is None:
+@@ -424,7 +424,7 @@ class FSOperations(object):
+ info.contents.Context = 1
+ try:
+ f = self.fs.open(path, mode)
+- print path, mode, repr(f)
++ print(path, mode, repr(f))
+ except ResourceInvalidError:
+ info.contents.IsDirectory = True
+ except FSError:
+@@ -896,10 +896,10 @@ def mount(fs, drive, foreground=False, ready_callback=
+ def check_ready(mp=None):
+ if ready_callback is not False:
+ check_alive(mp)
+- for _ in xrange(100):
++ for _ in range(100):
+ try:
+ os.stat(drive+":\\")
+- except EnvironmentError, e:
++ except EnvironmentError as e:
+ check_alive(mp)
+ time.sleep(0.05)
+ else:
+@@ -989,7 +989,7 @@ class MountProcess(subprocess.Popen):
+ cmd = cmd + "data = cPickle.loads(%s); "
+ cmd = cmd + "from fs.expose.dokan import MountProcess; "
+ cmd = cmd + "MountProcess._do_mount(data)"
+- cmd = cmd % (repr(cPickle.dumps((fs,drive,dokan_opts,nowait),-1)),)
++ cmd = cmd % (repr(pickle.dumps((fs,drive,dokan_opts,nowait),-1)),)
+ cmd = [sys.executable,"-c",cmd]
+ super(MountProcess,self).__init__(cmd,**kwds)
+
+--- fs/expose/ftp.py.orig 2015-04-12 17:24:29 UTC
++++ fs/expose/ftp.py
+@@ -28,7 +28,7 @@ from fs.osfs import OSFS
+ from fs.errors import convert_fs_errors
+ from fs import iotools
+
+-from six import text_type as unicode
++from six import text_type as str
+
+
+ # Get these once so we can reuse them:
+@@ -107,9 +107,9 @@ class FTPFS(ftpserver.AbstractedFS):
+ def chdir(self, path):
+ # We dont' use the decorator here, we actually decode a version of the
+ # path for use with pyfs, but keep the original for use with pyftpdlib.
+- if not isinstance(path, unicode):
++ if not isinstance(path, str):
+ # pyftpdlib 0.7.x
+- unipath = unicode(path, self.encoding)
++ unipath = str(path, self.encoding)
+ else:
+ # pyftpdlib 1.x
+ unipath = path
+@@ -134,7 +134,7 @@ class FTPFS(ftpserver.AbstractedFS):
+ @convert_fs_errors
+ @decode_args
+ def listdir(self, path):
+- return map(lambda x: x.encode(self.encoding), self.fs.listdir(path))
++ return [x.encode(self.encoding) for x in self.fs.listdir(path)]
+
+ @convert_fs_errors
+ @decode_args
+@@ -190,7 +190,7 @@ class FTPFS(ftpserver.AbstractedFS):
+ kwargs['st_mode'] = info['mode']
+ else:
+ # Otherwise, build one. Not executable by default.
+- mode = 0660
++ mode = 0o660
+ # Merge in the type (dir or file). File is tested first, some file systems
+ # such as ArchiveMountFS treat archive files as directories too. By checking
+ # file first, any such files will be only files (not directories).
+@@ -198,7 +198,7 @@ class FTPFS(ftpserver.AbstractedFS):
+ mode |= stat.S_IFREG
+ elif self.fs.isdir(path):
+ mode |= stat.S_IFDIR
+- mode |= 0110 # Merge in exec bit to signal dir is listable
++ mode |= 0o110 # Merge in exec bit to signal dir is listable
+ kwargs['st_mode'] = mode
+ return FakeStat(**kwargs)
+
+--- fs/expose/fuse/__init__.py.orig 2015-04-12 17:24:29 UTC
++++ fs/expose/fuse/__init__.py
+@@ -56,7 +56,7 @@ import errno
+ import time
+ import stat as statinfo
+ import subprocess
+-import cPickle
++import pickle
+
+ import logging
+ logger = logging.getLogger("fs.expose.fuse")
+@@ -404,9 +404,9 @@ class FSOperations(Operations):
+ # The interesting stuff
+ if 'st_mode' not in info:
+ if self.fs.isdir(path):
+- info['st_mode'] = 0755
++ info['st_mode'] = 0o755
+ else:
+- info['st_mode'] = 0666
++ info['st_mode'] = 0o666
+ mode = info['st_mode']
+ if not statinfo.S_ISDIR(mode) and not statinfo.S_ISREG(mode):
+ if self.fs.isdir(path):
+@@ -432,7 +432,7 @@ class FSOperations(Operations):
+ except KeyError:
+ pass
+ else:
+- info["st_size"] = max(written_sizes.values() + [info["st_size"]])
++ info["st_size"] = max(list(written_sizes.values()) + [info["st_size"]])
+ return info
+
+
+@@ -491,7 +491,7 @@ def unmount(path):
+ else:
+ args = ["fusermount", "-u", path]
+
+- for num_tries in xrange(3):
++ for num_tries in range(3):
+ p = subprocess.Popen(args,
+ stderr=subprocess.PIPE,
+ stdout=subprocess.PIPE)
+@@ -554,7 +554,7 @@ class MountProcess(subprocess.Popen):
+ cmd = cmd + 'data = loads(%s); '
+ cmd = cmd + 'from fs.expose.fuse import MountProcess; '
+ cmd = cmd + 'MountProcess._do_mount_nowait(data)'
+- cmd = cmd % (repr(cPickle.dumps((fs, path, fuse_opts), -1)),)
++ cmd = cmd % (repr(pickle.dumps((fs, path, fuse_opts), -1)),)
+ cmd = [sys.executable, "-c", cmd]
+ super(MountProcess, self).__init__(cmd, **kwds)
+ else:
+@@ -567,7 +567,7 @@ class MountProcess(subprocess.Popen):
+ cmd = cmd + 'data = loads(%s); '
+ cmd = cmd + 'from fs.expose.fuse import MountProcess; '
+ cmd = cmd + 'MountProcess._do_mount_wait(data)'
+- cmd = cmd % (repr(cPickle.dumps((fs, path, fuse_opts, r, w), -1)),)
++ cmd = cmd % (repr(pickle.dumps((fs, path, fuse_opts, r, w), -1)),)
+ cmd = [sys.executable, "-c", cmd]
+ super(MountProcess, self).__init__(cmd, **kwds)
+ os.close(w)
+@@ -635,8 +635,8 @@ class MountProcess(subprocess.Popen):
+ opts["unmount_callback"] = unmount_callback
+ try:
+ mount(fs, path, **opts)
+- except Exception, e:
+- os.write(w, b("E") + unicode(e).encode('ascii', errors='replace'))
++ except Exception as e:
++ os.write(w, b("E") + str(e).encode('ascii', errors='replace'))
+ os.close(w)
+
+ if not successful:
+@@ -653,5 +653,5 @@ if __name__ == "__main__":
+ os.makedirs(mount_point)
+
+ def ready_callback():
+- print "READY"
++ print("READY")
+ mount(TempFS(), mount_point, foreground=True, ready_callback=ready_callback)
+--- fs/expose/fuse/fuse_ctypes.py.orig 2015-04-12 17:24:29 UTC
++++ fs/expose/fuse/fuse_ctypes.py
+@@ -12,8 +12,8 @@
+ # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+ # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-from __future__ import division
+
++
+ from ctypes import *
+ from ctypes.util import find_library
+ from errno import *
+@@ -273,7 +273,7 @@ def time_of_timespec(ts):
+ return ts.tv_sec + ts.tv_nsec / 10 ** 9
+
+ def set_st_attrs(st, attrs):
+- for key, val in attrs.items():
++ for key, val in list(attrs.items()):
+ if key in ('st_atime', 'st_mtime', 'st_ctime'):
+ timespec = getattr(st, key + 'spec')
+ timespec.tv_sec = int(val)
+@@ -314,7 +314,7 @@ class FUSE(object):
+ kwargs.setdefault('fsname', operations.__class__.__name__)
+ args.append('-o')
+ args.append(','.join(key if val == True else '%s=%s' % (key, val)
+- for key, val in kwargs.items()))
++ for key, val in list(kwargs.items())))
+ args.append(mountpoint)
+ argv = (c_char_p * len(args))(*args)
+
+@@ -331,7 +331,7 @@ class FUSE(object):
+ """Decorator for the methods that follow"""
+ try:
+ return func(*args, **kwargs) or 0
+- except OSError, e:
++ except OSError as e:
+ return -(e.errno or EFAULT)
+ except:
+ print_exc()
+@@ -400,7 +400,7 @@ class FUSE(object):
+ def statfs(self, path, buf):
+ stv = buf.contents
+ attrs = self.operations('statfs', path)
+- for key, val in attrs.items():
++ for key, val in list(attrs.items()):
+ if hasattr(stv, key):
+ setattr(stv, key, val)
+ return 0
+@@ -576,7 +576,7 @@ class Operations(object):
+
+ if path != '/':
+ raise OSError(ENOENT, '')
+- return dict(st_mode=(S_IFDIR | 0755), st_nlink=2)
++ return dict(st_mode=(S_IFDIR | 0o755), st_nlink=2)
+
+ def getxattr(self, path, name, position=0):
+ raise OSError(ENOTSUP, '')
+@@ -667,13 +667,13 @@ class Operations(object):
+
+ class LoggingMixIn:
+ def __call__(self, op, path, *args):
+- print '->', op, path, repr(args)
++ print('->', op, path, repr(args))
+ ret = '[Unknown Error]'
+ try:
+ ret = getattr(self, op)(path, *args)
+ return ret
+- except OSError, e:
++ except OSError as e:
+ ret = str(e)
+ raise
+ finally:
+- print '<-', op, repr(ret)
++ print('<-', op, repr(ret))
+--- fs/expose/fuse/fuse.py.orig 2015-04-12 17:24:29 UTC
++++ fs/expose/fuse/fuse.py
+@@ -12,8 +12,8 @@
+ # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+ # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-from __future__ import division
+
++
+ from ctypes import *
+ from ctypes.util import find_library
+ from errno import *
+@@ -269,7 +269,7 @@ def time_of_timespec(ts):
+ return ts.tv_sec + ts.tv_nsec / 10 ** 9
+
+ def set_st_attrs(st, attrs):
+- for key, val in attrs.items():
++ for key, val in list(attrs.items()):
+ if key in ('st_atime', 'st_mtime', 'st_ctime'):
+ timespec = getattr(st, key + 'spec')
+ timespec.tv_sec = int(val)
+@@ -312,7 +312,7 @@ class FUSE(object):
+ kwargs.setdefault('fsname', operations.__class__.__name__)
+ args.append('-o')
+ args.append(','.join(key if val == True else '%s=%s' % (key, val)
+- for key, val in kwargs.items()))
++ for key, val in list(kwargs.items())))
+ args.append(mountpoint)
+ argv = (c_char_p * len(args))(*args)
+
+@@ -331,7 +331,7 @@ class FUSE(object):
+ """Decorator for the methods that follow"""
+ try:
+ return func(*args, **kwargs) or 0
+- except OSError, e:
++ except OSError as e:
+ return -(e.errno or EFAULT)
+ except:
+ print_exc()
+@@ -406,7 +406,7 @@ class FUSE(object):
+ def statfs(self, path, buf):
+ stv = buf.contents
+ attrs = self.operations('statfs', path)
+- for key, val in attrs.items():
++ for key, val in list(attrs.items()):
+ if hasattr(stv, key):
+ setattr(stv, key, val)
+ return 0
+@@ -579,7 +579,7 @@ class Operations(object):
+
+ if path != '/':
+ raise FuseOSError(ENOENT)
+- return dict(st_mode=(S_IFDIR | 0755), st_nlink=2)
++ return dict(st_mode=(S_IFDIR | 0o755), st_nlink=2)
+
+ def getxattr(self, path, name, position=0):
+ raise FuseOSError(ENOTSUP)
+@@ -670,13 +670,13 @@ class Operations(object):
+
+ class LoggingMixIn:
+ def __call__(self, op, path, *args):
+- print '->', op, path, repr(args)
++ print('->', op, path, repr(args))
+ ret = '[Unhandled Exception]'
+ try:
+ ret = getattr(self, op)(path, *args)
+ return ret
+- except OSError, e:
++ except OSError as e:
+ ret = str(e)
+ raise
+ finally:
+- print '<-', op, repr(ret)
++ print('<-', op, repr(ret))
+--- fs/expose/fuse/fuse3.py.orig 2022-03-04 17:14:43 UTC
++++ fs/expose/fuse/fuse3.py
+@@ -229,7 +229,7 @@ def time_of_timespec(ts):
+ return ts.tv_sec + ts.tv_nsec / 10 ** 9
+
+ def set_st_attrs(st, attrs):
+- for key, val in attrs.items():
++ for key, val in list(attrs.items()):
+ if key in ('st_atime', 'st_mtime', 'st_ctime'):
+ timespec = getattr(st, key + 'spec')
+ timespec.tv_sec = int(val)
+@@ -274,7 +274,7 @@ class FUSE(object):
+ kwargs.setdefault('fsname', operations.__class__.__name__)
+ args.append('-o')
+ args.append(','.join(key if val == True else '%s=%s' % (key, val)
+- for key, val in kwargs.items()))
++ for key, val in list(kwargs.items())))
+ args.append(mountpoint)
+ argv = (c_char_p * len(args))(*args)
+
+@@ -361,7 +361,7 @@ class FUSE(object):
+ def statfs(self, path, buf):
+ stv = buf.contents
+ attrs = self.operations('statfs', path)
+- for key, val in attrs.items():
++ for key, val in list(attrs.items()):
+ if hasattr(stv, key):
+ setattr(stv, key, val)
+ return 0
+--- fs/expose/http.py.orig 2015-04-12 17:24:29 UTC
++++ fs/expose/http.py
+@@ -1,13 +1,13 @@
+ __all__ = ["serve_fs"]
+
+-import SimpleHTTPServer
+-import SocketServer
++import http.server
++import socketserver
+ from fs.path import pathjoin, dirname
+ from fs.errors import FSError
+ from time import mktime
+-from cStringIO import StringIO
++from io import StringIO
+ import cgi
+-import urllib
++import urllib.request, urllib.parse, urllib.error
+ import posixpath
+ import time
+ import threading
+@@ -16,13 +16,13 @@ import socket
+ def _datetime_to_epoch(d):
+ return mktime(d.timetuple())
+
+-class FSHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
++class FSHTTPRequestHandler(http.server.SimpleHTTPRequestHandler):
+
+ """A hacked together version of SimpleHTTPRequestHandler"""
+
+ def __init__(self, fs, request, client_address, server):
+ self._fs = fs
+- SimpleHTTPServer.SimpleHTTPRequestHandler.__init__(self, request, client_address, server)
++ http.server.SimpleHTTPRequestHandler.__init__(self, request, client_address, server)
+
+ def do_GET(self):
+ """Serve a GET request."""
+@@ -69,7 +69,7 @@ class FSHTTPRequestHandler(SimpleHTTPServer.SimpleHTTP
+ try:
+ info = self._fs.getinfo(path)
+ f = self._fs.open(path, 'rb')
+- except FSError, e:
++ except FSError as e:
+ self.send_error(404, str(e))
+ return None
+ self.send_response(200)
+@@ -98,7 +98,7 @@ class FSHTTPRequestHandler(SimpleHTTPServer.SimpleHTTP
+ paths = [p+'/' for p in sorted(dir_paths, key=lambda p:p.lower())] + sorted(file_paths, key=lambda p:p.lower())
+ #list.sort(key=lambda a: a.lower())
+ f = StringIO()
+- displaypath = cgi.escape(urllib.unquote(self.path))
++ displaypath = cgi.escape(urllib.parse.unquote(self.path))
+ f.write('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">')
+ f.write("<html>\n<title>Directory listing for %s</title>\n" % displaypath)
+ f.write("<body>\n<h2>Directory listing for %s</h2>\n" % displaypath)
+@@ -106,11 +106,11 @@ class FSHTTPRequestHandler(SimpleHTTPServer.SimpleHTTP
+
+ parent = dirname(path)
+ if path != parent:
+- f.write('<li><a href="%s">../</a></li>' % urllib.quote(parent.rstrip('/') + '/'))
++ f.write('<li><a href="%s">../</a></li>' % urllib.parse.quote(parent.rstrip('/') + '/'))
+
+ for path in paths:
+ f.write('<li><a href="%s">%s</a>\n'
+- % (urllib.quote(path), cgi.escape(path)))
++ % (urllib.parse.quote(path), cgi.escape(path)))
+ f.write("</ul>\n<hr>\n</body>\n</html>\n")
+ length = f.tell()
+ f.seek(0)
+@@ -124,7 +124,7 @@ class FSHTTPRequestHandler(SimpleHTTPServer.SimpleHTTP
+ # abandon query parameters
+ path = path.split('?',1)[0]
+ path = path.split('#',1)[0]
+- path = posixpath.normpath(urllib.unquote(path))
++ path = posixpath.normpath(urllib.parse.unquote(path))
+ return path
+
+
+@@ -143,7 +143,7 @@ def serve_fs(fs, address='', port=8000):
+
+ #class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
+ # pass
+- httpd = SocketServer.TCPServer((address, port), Handler, bind_and_activate=False)
++ httpd = socketserver.TCPServer((address, port), Handler, bind_and_activate=False)
+ #httpd = ThreadedTCPServer((address, port), Handler, bind_and_activate=False)
+ httpd.allow_reuse_address = True
+ httpd.server_bind()
+--- fs/expose/importhook.py.orig 2015-04-12 17:24:29 UTC
++++ fs/expose/importhook.py
+@@ -60,7 +60,7 @@ class FSImportHook(object):
+ def __init__(self,fs_or_url):
+ # If given a string, try to open it as an FS url.
+ # Don't open things on the local filesystem though.
+- if isinstance(fs_or_url,basestring):
++ if isinstance(fs_or_url,str):
+ if ":/" not in fs_or_url:
+ raise ImportError
+ try:
+@@ -182,7 +182,7 @@ class FSImportHook(object):
+ mod.__loader__ = self
+ sys.modules[fullname] = mod
+ try:
+- exec code in mod.__dict__
++ exec(code, mod.__dict__)
+ mod.__file__ = self.get_filename(fullname,info)
+ if self.is_package(fullname,info):
+ if self.path is None:
+@@ -231,7 +231,7 @@ class FSImportHook(object):
+ """Read the specified data file."""
+ try:
+ return self.fs.getcontents(path, 'rb')
+- except FSError, e:
++ except FSError as e:
+ raise IOError(str(e))
+
+ def get_filename(self,fullname,info=None):
+--- fs/expose/sftp.py.orig 2015-04-12 17:24:29 UTC
++++ fs/expose/sftp.py
+@@ -24,12 +24,12 @@ is, you probably don't want to use it.
+
+ """
+
+-from __future__ import with_statement
+
++
+ import os
+ import stat as statinfo
+ import time
+-import SocketServer
++import socketserver
+ import threading
+
+ import paramiko
+@@ -73,11 +73,11 @@ def report_sftp_errors(func):
+ def wrapper(*args,**kwds):
+ try:
+ return func(*args, **kwds)
+- except ResourceNotFoundError, e:
++ except ResourceNotFoundError as e:
+ return paramiko.SFTP_NO_SUCH_FILE
+- except UnsupportedError, e:
++ except UnsupportedError as e:
+ return paramiko.SFTP_OP_UNSUPPORTED
+- except FSError, e:
++ except FSError as e:
+ return paramiko.SFTP_FAILURE
+ return wrapper
+
+@@ -114,7 +114,7 @@ class SFTPServerInterface(paramiko.SFTPServerInterface
+
+ @report_sftp_errors
+ def list_folder(self, path):
+- if not isinstance(path, unicode):
++ if not isinstance(path, str):
+ path = path.decode(self.encoding)
+ stats = []
+ for entry in self.fs.listdir(path, absolute=True):
+@@ -125,7 +125,7 @@ class SFTPServerInterface(paramiko.SFTPServerInterface
+
+ @report_sftp_errors
+ def stat(self, path):
+- if not isinstance(path, unicode):
++ if not isinstance(path, str):
+ path = path.decode(self.encoding)
+
+ info = self.fs.getinfo(path)
+@@ -146,9 +146,9 @@ class SFTPServerInterface(paramiko.SFTPServerInterface
+ stat.st_mtime = time.mktime(info.get("modified_time").timetuple())
+
+ if isdir(self.fs, path, info):
+- stat.st_mode = 0777 | statinfo.S_IFDIR
++ stat.st_mode = 0o777 | statinfo.S_IFDIR
+ else:
+- stat.st_mode = 0777 | statinfo.S_IFREG
++ stat.st_mode = 0o777 | statinfo.S_IFREG
+ return stat
+
+ def lstat(self, path):
+@@ -156,16 +156,16 @@ class SFTPServerInterface(paramiko.SFTPServerInterface
+
+ @report_sftp_errors
+ def remove(self, path):
+- if not isinstance(path, unicode):
++ if not isinstance(path, str):
+ path = path.decode(self.encoding)
+ self.fs.remove(path)
+ return paramiko.SFTP_OK
+
+ @report_sftp_errors
+ def rename(self, oldpath, newpath):
+- if not isinstance(oldpath, unicode):
++ if not isinstance(oldpath, str):
+ oldpath = oldpath.decode(self.encoding)
+- if not isinstance(newpath, unicode):
++ if not isinstance(newpath, str):
+ newpath = newpath.decode(self.encoding)
+ if self.fs.isfile(oldpath):
+ self.fs.move(oldpath, newpath)
+@@ -175,14 +175,14 @@ class SFTPServerInterface(paramiko.SFTPServerInterface
+
+ @report_sftp_errors
+ def mkdir(self, path, attr):
+- if not isinstance(path, unicode):
++ if not isinstance(path, str):
+ path = path.decode(self.encoding)
+ self.fs.makedir(path)
+ return paramiko.SFTP_OK
+
+ @report_sftp_errors
+ def rmdir(self, path):
+- if not isinstance(path, unicode):
++ if not isinstance(path, str):
+ path = path.decode(self.encoding)
+ self.fs.removedir(path)
+ return paramiko.SFTP_OK
+@@ -224,7 +224,7 @@ class SFTPHandle(paramiko.SFTPHandle):
+ super(SFTPHandle, self).__init__(flags)
+ mode = flags_to_mode(flags)
+ self.owner = owner
+- if not isinstance(path, unicode):
++ if not isinstance(path, str):
+ path = path.decode(self.owner.encoding)
+ self.path = path
+ self._file = owner.fs.open(path, mode)
+@@ -263,7 +263,7 @@ class SFTPServer(paramiko.SFTPServer):
+ super(SFTPServer, self).finish_subsystem()
+
+
+-class SFTPRequestHandler(SocketServer.BaseRequestHandler):
++class SFTPRequestHandler(socketserver.BaseRequestHandler):
+ """SocketServer RequestHandler subclass for BaseSFTPServer.
+
+ This RequestHandler subclass creates a paramiko Transport, sets up the
+@@ -305,7 +305,7 @@ class SFTPRequestHandler(SocketServer.BaseRequestHandl
+
+
+
+-class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
++class ThreadedTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
+ pass
+
+
+@@ -334,7 +334,7 @@ class BaseSFTPServer(ThreadedTCPServer):
+ self.host_key = host_key
+ if RequestHandlerClass is None:
+ RequestHandlerClass = SFTPRequestHandler
+- SocketServer.TCPServer.__init__(self, address, RequestHandlerClass)
++ socketserver.TCPServer.__init__(self, address, RequestHandlerClass)
+
+ def shutdown_request(self, request):
+ # Prevent TCPServer from closing the connection prematurely
+--- fs/expose/wsgi/__init__.py.orig 2022-03-04 17:14:43 UTC
++++ fs/expose/wsgi/__init__.py
+@@ -1 +1 @@
+-from wsgi import serve_fs
++from .wsgi import serve_fs
+--- fs/expose/wsgi/serve_home.py.orig 2015-04-12 17:24:29 UTC
++++ fs/expose/wsgi/serve_home.py
+@@ -1,10 +1,10 @@
+ from wsgiref.simple_server import make_server
+
+ from fs.osfs import OSFS
+-from wsgi import serve_fs
++from .wsgi import serve_fs
+ osfs = OSFS('~/')
+ application = serve_fs(osfs)
+
+ httpd = make_server('', 8000, application)
+-print "Serving on http://127.0.0.1:8000"
++print("Serving on http://127.0.0.1:8000")
+ httpd.serve_forever()
+--- fs/expose/wsgi/wsgi.py.orig 2015-04-12 17:24:29 UTC
++++ fs/expose/wsgi/wsgi.py
+@@ -1,5 +1,5 @@
+
+-import urlparse
++import urllib.parse
+ import mimetypes
+
+ from fs.errors import FSError
+@@ -10,7 +10,7 @@ from datetime import datetime
+ try:
+ from mako.template import Template
+ except ImportError:
+- print "Requires mako templates http://www.makotemplates.org/"
++ print("Requires mako templates http://www.makotemplates.org/")
+ raise
+
+
+@@ -28,7 +28,7 @@ class WSGIServer(object):
+ def __init__(self, serve_fs, indexes=True, dir_template=None, chunk_size=16*1024*1024):
+
+ if dir_template is None:
+- from dirtemplate import template as dir_template
++ from .dirtemplate import template as dir_template
+
+ self.serve_fs = serve_fs
+ self.indexes = indexes
+@@ -57,7 +57,7 @@ class WSGIServer(object):
+ serving_file = None
+ try:
+ serving_file = self.serve_fs.open(path, 'rb')
+- except Exception, e:
++ except Exception as e:
+ if serving_file is not None:
+ serving_file.close()
+ return self.serve_500(request, str(e))
+--- fs/expose/xmlrpc.py.orig 2022-03-04 17:14:43 UTC
++++ fs/expose/xmlrpc.py
+@@ -15,8 +15,8 @@ an FS object, which can then be exposed using whatever
+
+ """
+
+-import xmlrpclib
+-from SimpleXMLRPCServer import SimpleXMLRPCServer
++import xmlrpc.client
++from xmlrpc.server import SimpleXMLRPCServer
+ from datetime import datetime
+ import base64
+
+@@ -61,13 +61,13 @@ class RPCFSInterface(object):
+
+ def getmeta(self, meta_name):
+ meta = self.fs.getmeta(meta_name)
+- if isinstance(meta, basestring):
++ if isinstance(meta, str):
+ meta = self.decode_path(meta)
+ return meta
+
+ def getmeta_default(self, meta_name, default):
+ meta = self.fs.getmeta(meta_name, default)
+- if isinstance(meta, basestring):
++ if isinstance(meta, str):
+ meta = self.decode_path(meta)
+ return meta
+
+@@ -77,7 +77,7 @@ class RPCFSInterface(object):
+ def get_contents(self, path, mode="rb"):
+ path = self.decode_path(path)
+ data = self.fs.getcontents(path, mode)
+- return xmlrpclib.Binary(data)
++ return xmlrpc.client.Binary(data)
+
+ def set_contents(self, path, data):
+ path = self.decode_path(path)
+@@ -119,16 +119,16 @@ class RPCFSInterface(object):
+
+ def settimes(self, path, accessed_time, modified_time):
+ path = self.decode_path(path)
+- if isinstance(accessed_time, xmlrpclib.DateTime):
++ if isinstance(accessed_time, xmlrpc.client.DateTime):
+ accessed_time = datetime.strptime(accessed_time.value, "%Y%m%dT%H:%M:%S")
+- if isinstance(modified_time, xmlrpclib.DateTime):
++ if isinstance(modified_time, xmlrpc.client.DateTime):
+ modified_time = datetime.strptime(modified_time.value, "%Y%m%dT%H:%M:%S")
+ return self.fs.settimes(path, accessed_time, modified_time)
+
+ def getinfo(self, path):
+ path = self.decode_path(path)
+ info = self.fs.getinfo(path)
+- info = dict((k, v) for k, v in info.iteritems()
++ info = dict((k, v) for k, v in info.items()
+ if k in self._allowed_info)
+ return info
+
+--- fs/filelike.py.orig 2022-03-04 17:14:43 UTC
++++ fs/filelike.py
+@@ -52,9 +52,9 @@ if PY3:
+ from six import BytesIO as _StringIO
+ else:
+ try:
+- from cStringIO import StringIO as _StringIO
++ from io import StringIO as _StringIO
+ except ImportError:
+- from StringIO import StringIO as _StringIO
++ from io import StringIO as _StringIO
+
+
+ class FileLikeBase(object):
+@@ -305,7 +305,7 @@ class FileLikeBase(object):
+ self.close()
+ return False
+
+- def next(self):
++ def __next__(self):
+ """next() method complying with the iterator protocol.
+
+ File-like objects are their own iterators, with each call to
+--- fs/ftpfs.py.orig 2015-04-12 17:24:29 UTC
++++ fs/ftpfs.py
+@@ -37,9 +37,9 @@ if PY3:
+ from six import BytesIO as StringIO
+ else:
+ try:
+- from cStringIO import StringIO
++ from io import StringIO
+ except ImportError:
+- from StringIO import StringIO
++ from io import StringIO
+
+ import time
+
+@@ -221,10 +221,10 @@ class FTPListDataParser(object):
+ elif c == 'r':
+ result.try_retr = True
+ elif c == 's':
+- result.size = long(buf[i+1:j])
++ result.size = int(buf[i+1:j])
+ elif c == 'm':
+ result.mtime_type = MTIME_TYPE.LOCAL
+- result.mtime = long(buf[i+1:j])
++ result.mtime = int(buf[i+1:j])
+ elif c == 'i':
+ result.id_type = ID_TYPE.FULL
+ result.id = buf[i+1:j-i-1]
+@@ -285,7 +285,7 @@ class FTPListDataParser(object):
+
+ elif state == 4: # getting tentative size
+ try:
+- size = long(buf[i:j])
++ size = int(buf[i:j])
+ except ValueError:
+ pass
+ state = 5
+@@ -295,25 +295,25 @@ class FTPListDataParser(object):
+ if month >= 0:
+ state = 6
+ else:
+- size = long(buf[i:j])
++ size = int(buf[i:j])
+
+ elif state == 6: # have size and month
+- mday = long(buf[i:j])
++ mday = int(buf[i:j])
+ state = 7
+
+ elif state == 7: # have size, month, mday
+ if (j - i == 4) and (buf[i+1] == ':'):
+- hour = long(buf[i])
+- minute = long(buf[i+2:i+4])
++ hour = int(buf[i])
++ minute = int(buf[i+2:i+4])
+ result.mtime_type = MTIME_TYPE.REMOTE_MINUTE
+ result.mtime = self._guess_time(month, mday, hour, minute)
+ elif (j - i == 5) and (buf[i+2] == ':'):
+- hour = long(buf[i:i+2])
+- minute = long(buf[i+3:i+5])
++ hour = int(buf[i:i+2])
++ minute = int(buf[i+3:i+5])
+ result.mtime_type = MTIME_TYPE.REMOTE_MINUTE
+ result.mtime = self._guess_time(month, mday, hour, minute)
+ elif j - i >= 4:
+- year = long(buf[i:j])
++ year = int(buf[i:j])
+ result.mtime_type = MTIME_TYPE.REMOTE_DAY
+ result.mtime = self._get_mtime(year, month, mday)
+ else:
+@@ -379,7 +379,7 @@ class FTPListDataParser(object):
+ j = i
+
+ j = buf.index('-', j)
+- mday = long(buf[i:j])
++ mday = int(buf[i:j])
+
+ j = _skip(buf, j, '-')
+ i = j
+@@ -391,13 +391,13 @@ class FTPListDataParser(object):
+ j = _skip(buf, j, '-')
+ i = j
+ j = buf.index(' ', j)
+- year = long(buf[i:j])
++ year = int(buf[i:j])
+
+ j = _skip(buf, j, ' ')
+ i = j
+
+ j = buf.index(':', j)
+- hour = long(buf[i:j])
++ hour = int(buf[i:j])
+ j = _skip(buf, j, ':')
+ i = j
+
+@@ -406,7 +406,7 @@ class FTPListDataParser(object):
+ if j == buflen:
+ raise IndexError # abort, abort!
+
+- minute = long(buf[i:j])
++ minute = int(buf[i:j])
+
+ result.mtime_type = MTIME_TYPE.REMOTE_MINUTE
+ result.mtime = self._get_mtime(year, month, mday, hour, minute)
+@@ -430,17 +430,17 @@ class FTPListDataParser(object):
+ result = FTPListData(buf)
+
+ j = buf.index('-', j)
+- month = long(buf[i:j])
++ month = int(buf[i:j])
+
+ j = _skip(buf, j, '-')
+ i = j
+ j = buf.index('-', j)
+- mday = long(buf[i:j])
++ mday = int(buf[i:j])
+
+ j = _skip(buf, j, '-')
+ i = j
+ j = buf.index(' ', j)
+- year = long(buf[i:j])
++ year = int(buf[i:j])
+ if year < 50:
+ year += 2000
+ if year < 1000:
+@@ -449,14 +449,14 @@ class FTPListDataParser(object):
+ j = _skip(buf, j, ' ')
+ i = j
+ j = buf.index(':', j)
+- hour = long(buf[i:j])
++ hour = int(buf[i:j])
+ j = _skip(buf, j, ':')
+ i = j
+ while not (buf[j] in 'AP'):
+ j += 1
+ if j == buflen:
+ raise IndexError
+- minute = long(buf[i:j])
++ minute = int(buf[i:j])
+
+ if buf[j] == 'A':
+ j += 1
+@@ -482,7 +482,7 @@ class FTPListDataParser(object):
+ i = j
+ j = buf.index(' ', j)
+
+- result.size = long(buf[i:j])
++ result.size = int(buf[i:j])
+ result.try_retr = True
+
+ j = _skip(buf, j, ' ')
+@@ -546,10 +546,10 @@ class FTPMlstDataParser(object):
+ int(factvalue[12:14]),
+ 0, 0, 0))
+ elif factname == 'size':
+- result.size = long(factvalue)
++ result.size = int(factvalue)
+ elif factname == 'sizd':
+ # some FTP servers report directory size with sizd
+- result.size = long(factvalue)
++ result.size = int(factvalue)
+ elif factname == 'type':
+ if factvalue.lower() == 'file':
+ result.try_retr = True
+@@ -605,7 +605,7 @@ def fileftperrors(f):
+ try:
+ try:
+ ret = f(self, *args, **kwargs)
+- except Exception, e:
++ except Exception as e:
+ self.ftpfs._translate_exception(args[0] if args else '', e)
+ finally:
+ self._lock.release()
+@@ -795,16 +795,16 @@ class _FTPFile(object):
+ self.conn.close()
+ self.conn = None
+ self.ftp.voidresp()
+- except error_temp, error_perm:
++            except error_temp as error_perm:  # NOTE(review): binds error_temp as 'error_perm' (faithful Py2 semantics); upstream likely intended 'except (error_temp, error_perm):'
+ pass
+ if self.ftp is not None:
+ try:
+ self.ftp.close()
+- except error_temp, error_perm:
++            except error_temp as error_perm:  # NOTE(review): binds error_temp as 'error_perm' (faithful Py2 semantics); upstream likely intended 'except (error_temp, error_perm):'
+ pass
+ self.closed = True
+
+- def next(self):
++ def __next__(self):
+ return self.readline()
+
+ def readline(self, size=None):
+@@ -823,7 +823,7 @@ def ftperrors(f):
+ try:
+ try:
+ ret = f(self, *args, **kwargs)
+- except Exception, e:
++ except Exception as e:
+ self._translate_exception(args[0] if args else '', e)
+ finally:
+ self._leave_dircache()
+@@ -834,7 +834,7 @@ def ftperrors(f):
+
+
+ def _encode(s):
+- if isinstance(s, unicode):
++ if isinstance(s, str):
+ return s.encode('utf-8')
+ return s
+
+@@ -956,7 +956,7 @@ class FTPFS(FS):
+ return features
+
+ def on_line(line):
+- if not isinstance(line, unicode):
++ if not isinstance(line, str):
+ line = line.decode('utf-8')
+ info = parse_ftp_list_line(line, self.use_mlst)
+ if info:
+@@ -986,7 +986,7 @@ class FTPFS(FS):
+ else: # Matrix FTP server has bug
+ on_line(list_line)
+ # if it's a dir, then we can send a MLSD
+- if dirlist[dirlist.keys()[0]]['try_cwd']:
++ if dirlist[list(dirlist.keys())[0]]['try_cwd']:
+ dirlist = {}
+ self.ftp.retrlines("MLSD " + encoded_path, on_line)
+ else:
+@@ -996,11 +996,11 @@ class FTPFS(FS):
+ self.dircache[path] = dirlist
+
+ def is_symlink(info):
+- return info['try_retr'] and info['try_cwd'] and info.has_key('target')
++ return info['try_retr'] and info['try_cwd'] and 'target' in info
+
+ def resolve_symlink(linkpath):
+ linkinfo = self.getinfo(linkpath)
+- if not linkinfo.has_key('resolved'):
++ if 'resolved' not in linkinfo:
+ linkinfo['resolved'] = linkpath
+ if is_symlink(linkinfo):
+ target = linkinfo['target']
+@@ -1036,7 +1036,7 @@ class FTPFS(FS):
+ else:
+ dircache = self.dircache
+ paths = [normpath(abspath(path)) for path in paths]
+- for cached_path in dircache.keys():
++ for cached_path in list(dircache.keys()):
+ for path in paths:
+ if isbase(cached_path, path):
+ dircache.pop(cached_path, None)
+@@ -1083,7 +1083,7 @@ class FTPFS(FS):
+ else:
+ ftp.connect(self.host, self.port, self.timeout)
+ ftp.login(self.user, self.passwd, self.acct)
+- except socket_error, e:
++ except socket_error as e:
+ raise RemoteConnectionError(str(e), details=e)
+ return ftp
+
+@@ -1104,7 +1104,7 @@ class FTPFS(FS):
+ return '<FTPFS %s>' % self.host
+
+ def __unicode__(self):
+- return u'<FTPFS %s>' % self.host
++ return '<FTPFS %s>' % self.host
+
+ @convert_os_errors
+ def _translate_exception(self, path, exception):
+@@ -1225,7 +1225,7 @@ class FTPFS(FS):
+ raise ResourceNotFoundError(path)
+ if not self.isdir(path):
+ raise ResourceInvalidError(path)
+- paths = self._readdir(path).keys()
++ paths = list(self._readdir(path).keys())
+
+ return self._listdir_helper(path, paths, wildcard, full, absolute, dirs_only, files_only)
+
+@@ -1266,7 +1266,7 @@ class FTPFS(FS):
+ self.ftp.mkd(_encode(path))
+ except error_reply:
+ return
+- except error_perm, e:
++ except error_perm as e:
+ if recursive or allow_recreate:
+ return
+ if str(e).split(' ', 1)[0]=='550':
+@@ -1337,7 +1337,7 @@ class FTPFS(FS):
+ try:
+ self.refresh_dircache(dirname(src), dirname(dst))
+ self.ftp.rename(_encode(src), _encode(dst))
+- except error_perm, exception:
++ except error_perm as exception:
+ code, message = str(exception).split(' ', 1)
+ if code == "550":
+ if not self.exists(dirname(dst)):
+--- fs/httpfs.py.orig 2015-04-12 17:24:29 UTC
++++ fs/httpfs.py
+@@ -10,8 +10,8 @@ from fs.path import normpath
+ from fs.errors import ResourceNotFoundError, UnsupportedError
+ from fs.filelike import FileWrapper
+ from fs import iotools
+-
+-from urllib2 import urlopen, URLError
++from urllib.request import urlopen
++from urllib.error import URLError
+ from datetime import datetime
+
+
+@@ -50,9 +50,9 @@ class HTTPFS(FS):
+ url = self._make_url(path)
+ try:
+ f = urlopen(url)
+- except URLError, e:
++ except URLError as e:
+ raise ResourceNotFoundError(path, details=e)
+- except OSError, e:
++ except OSError as e:
+ raise ResourceNotFoundError(path, details=e)
+
+ return FileWrapper(f)
+--- fs/iotools.py.orig 2022-03-04 17:14:43 UTC
++++ fs/iotools.py
+@@ -1,6 +1,6 @@
+-from __future__ import unicode_literals
+-from __future__ import print_function
+
++
++
+ from fs import SEEK_SET, SEEK_CUR, SEEK_END
+
+ import io
+@@ -178,7 +178,7 @@ def make_bytes_io(data, encoding=None, errors=None):
+ if hasattr(data, 'mode') and 'b' in data.mode:
+ # It's already a binary file
+ return data
+- if not isinstance(data, basestring):
++ if not isinstance(data, str):
+ # It's a file, but we don't know if its binary
+ # TODO: Is there a better way than reading the entire file?
+ data = data.read() or b''
+--- fs/memoryfs.py.orig 2022-03-04 17:14:43 UTC
++++ fs/memoryfs.py
+@@ -78,10 +78,10 @@ class MemoryFile(object):
+ return "<MemoryFile in %s %s>" % (self.memory_fs, self.path)
+
+ def __repr__(self):
+- return u"<MemoryFile in %s %s>" % (self.memory_fs, self.path)
++ return "<MemoryFile in %s %s>" % (self.memory_fs, self.path)
+
+ def __unicode__(self):
+- return u"<MemoryFile in %s %s>" % (self.memory_fs, self.path)
++ return "<MemoryFile in %s %s>" % (self.memory_fs, self.path)
+
+ def __del__(self):
+ if not self.closed:
+@@ -101,7 +101,7 @@ class MemoryFile(object):
+-    def next(self):
++    def __next__(self):
+ if 'r' not in self.mode and '+' not in self.mode:
+ raise IOError("File not open for reading")
+- return self.mem_file.next()
++ return next(self.mem_file)
+
+ @seek_and_lock
+ def readline(self, *args, **kwargs):
+@@ -218,7 +218,7 @@ class DirEntry(object):
+ if self.isfile():
+ return "<file %s>" % self.name
+ elif self.isdir():
+- return "<dir %s>" % "".join("%s: %s" % (k, v.desc_contents()) for k, v in self.contents.iteritems())
++ return "<dir %s>" % "".join("%s: %s" % (k, v.desc_contents()) for k, v in self.contents.items())
+
+ def isdir(self):
+ return self.type == "dir"
+@@ -559,10 +559,10 @@ class MemoryFS(FS):
+ raise ResourceNotFoundError(path)
+ if dir_entry.isfile():
+ raise ResourceInvalidError(path, msg="not a directory: %(path)s")
+- paths = dir_entry.contents.keys()
++ paths = list(dir_entry.contents.keys())
+ for (i,p) in enumerate(paths):
+- if not isinstance(p,unicode):
+- paths[i] = unicode(p)
++ if not isinstance(p,str):
++ paths[i] = str(p)
+ return self._listdir_helper(path, paths, wildcard, full, absolute, dirs_only, files_only)
+
+ @synchronize
+@@ -578,10 +578,10 @@ class MemoryFS(FS):
+ info['accessed_time'] = dir_entry.accessed_time
+
+ if dir_entry.isdir():
+- info['st_mode'] = 0755 | stat.S_IFDIR
++ info['st_mode'] = 0o755 | stat.S_IFDIR
+ else:
+ info['size'] = len(dir_entry.data or b(''))
+- info['st_mode'] = 0666 | stat.S_IFREG
++ info['st_mode'] = 0o666 | stat.S_IFREG
+
+ return info
+
+@@ -671,12 +671,12 @@ class MemoryFS(FS):
+ @synchronize
+ def setxattr(self, path, key, value):
+ dir_entry = self._dir_entry(path)
+- key = unicode(key)
++ key = str(key)
+ dir_entry.xattrs[key] = value
+
+ @synchronize
+ def getxattr(self, path, key, default=None):
+- key = unicode(key)
++ key = str(key)
+ dir_entry = self._dir_entry(path)
+ return dir_entry.xattrs.get(key, default)
+
+@@ -691,4 +691,4 @@ class MemoryFS(FS):
+ @synchronize
+ def listxattrs(self, path):
+ dir_entry = self._dir_entry(path)
+- return dir_entry.xattrs.keys()
++ return list(dir_entry.xattrs.keys())
+--- fs/mountfs.py.orig 2022-03-04 17:14:43 UTC
++++ fs/mountfs.py
+@@ -61,7 +61,7 @@ class DirMount(object):
+ return "<DirMount %s, %s>" % (self.path, self.fs)
+
+ def __unicode__(self):
+- return u"<DirMount %s, %s>" % (self.path, self.fs)
++ return "<DirMount %s, %s>" % (self.path, self.fs)
+
+
+ class FileMount(object):
+@@ -90,12 +90,12 @@ class MountFS(FS):
+ self.mount_tree = PathMap()
+
+ def __str__(self):
+- return "<%s [%s]>" % (self.__class__.__name__,self.mount_tree.items(),)
++ return "<%s [%s]>" % (self.__class__.__name__,list(self.mount_tree.items()),)
+
+ __repr__ = __str__
+
+ def __unicode__(self):
+- return u"<%s [%s]>" % (self.__class__.__name__,self.mount_tree.items(),)
++ return "<%s [%s]>" % (self.__class__.__name__,list(self.mount_tree.items()),)
+
+ def _delegate(self, path):
+ path = abspath(normpath(path))
+@@ -119,7 +119,7 @@ class MountFS(FS):
+ return self, "/", path
+
+ try:
+- self.mount_tree.iternames(path).next()
++ next(self.mount_tree.iternames(path))
+ except StopIteration:
+ return None, None, None
+ else:
+@@ -129,7 +129,7 @@ class MountFS(FS):
+ def close(self):
+ # Explicitly closes children if requested
+ if self.auto_close:
+- for mount in self.mount_tree.itervalues():
++ for mount in self.mount_tree.values():
+ mount.fs.close()
+ # Free references (which may incidently call the close method of the child filesystems)
+ self.mount_tree.clear()
+--- fs/multifs.py.orig 2022-03-04 17:14:43 UTC
++++ fs/multifs.py
+@@ -106,7 +106,7 @@ class MultiFS(FS):
+
+ @synchronize
+ def __unicode__(self):
+- return u"<MultiFS: %s>" % ", ".join(unicode(fs) for fs in self.fs_sequence)
++ return "<MultiFS: %s>" % ", ".join(str(fs) for fs in self.fs_sequence)
+
+ def _get_priority(self, name):
+ return self.fs_priorities[name]
+@@ -128,7 +128,7 @@ class MultiFS(FS):
+
+ def _priority_sort(self):
+ """Sort filesystems by priority order"""
+- priority_order = sorted(self.fs_lookup.keys(), key=lambda n: self.fs_priorities[n], reverse=True)
++ priority_order = sorted(list(self.fs_lookup.keys()), key=lambda n: self.fs_priorities[n], reverse=True)
+ self.fs_sequence = [self.fs_lookup[name] for name in priority_order]
+
+ @synchronize
+@@ -214,7 +214,7 @@ class MultiFS(FS):
+ return self.writefs
+ for fs in self:
+ if fs.exists(path):
+- for fs_name, fs_object in self.fs_lookup.iteritems():
++ for fs_name, fs_object in self.fs_lookup.items():
+ if fs is fs_object:
+ return fs_name, fs
+ raise ResourceNotFoundError(path, msg="Path does not map to any filesystem: %(path)s")
+--- fs/opener.py.orig 2022-03-04 17:14:43 UTC
++++ fs/opener.py
+@@ -72,7 +72,7 @@ from fs.filelike import FileWrapper
+ from os import getcwd
+ import os.path
+ import re
+-from urlparse import urlparse
++from urllib.parse import urlparse
+
+ class OpenerError(Exception):
+ """The base exception thrown by openers"""
+@@ -794,7 +794,7 @@ example:
+ def get_fs(cls, registry, fs_name, fs_name_params, fs_path, writeable, create_dir):
+
+ from fs.mountfs import MountFS
+- from ConfigParser import ConfigParser
++ from configparser import ConfigParser
+ cfg = ConfigParser()
+
+ if '#' in fs_path:
+@@ -830,7 +830,7 @@ example:
+ def get_fs(cls, registry, fs_name, fs_name_params, fs_path, writeable, create_dir):
+
+ from fs.multifs import MultiFS
+- from ConfigParser import ConfigParser
++ from configparser import ConfigParser
+ cfg = ConfigParser()
+
+ if '#' in fs_path:
+--- fs/osfs/__init__.py.orig 2015-11-13 22:18:37 UTC
++++ fs/osfs/__init__.py
+@@ -48,13 +48,13 @@ def _os_stat(path):
+
+
+ @convert_os_errors
+-def _os_mkdir(name, mode=0777):
++def _os_mkdir(name, mode=0o777):
+ """Replacement for os.mkdir that raises FSError subclasses."""
+ return os.mkdir(name, mode)
+
+
+ @convert_os_errors
+-def _os_makedirs(name, mode=0777):
++def _os_makedirs(name, mode=0o777):
+ """Replacement for os.makdirs that raises FSError subclasses.
+
+ This implementation also correctly handles win32 long filenames (those
+@@ -71,7 +71,7 @@ def _os_makedirs(name, mode=0777):
+ if head and tail and not os.path.exists(head):
+ try:
+ _os_makedirs(head, mode)
+- except OSError, e:
++ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+ if tail == os.curdir:
+@@ -98,11 +98,11 @@ class OSFS(OSFSXAttrMixin, OSFSWatchMixin, FS):
+ 'atomic.setcontents': False}
+
+ if platform.system() == 'Windows':
+- _meta["invalid_path_chars"] = ''.join(chr(n) for n in xrange(31)) + '\\:*?"<>|'
++ _meta["invalid_path_chars"] = ''.join(chr(n) for n in range(31)) + '\\:*?"<>|'
+ else:
+ _meta["invalid_path_chars"] = '\0'
+
+- def __init__(self, root_path, thread_synchronize=_thread_synchronize_default, encoding=None, create=False, dir_mode=0700, use_long_paths=True):
++ def __init__(self, root_path, thread_synchronize=_thread_synchronize_default, encoding=None, create=False, dir_mode=0o700, use_long_paths=True):
+ """
+ Creates an FS object that represents the OS Filesystem under a given root path
+
+@@ -124,13 +124,13 @@ class OSFS(OSFSXAttrMixin, OSFSWatchMixin, FS):
+ if sys.platform == "win32":
+ if use_long_paths and not root_path.startswith("\\\\?\\"):
+ if not root_path.startswith("\\"):
+- root_path = u"\\\\?\\" + root_path
++ root_path = "\\\\?\\" + root_path
+ else:
+ # Explicitly mark UNC paths, seems to work better.
+ if root_path.startswith("\\\\"):
+- root_path = u"\\\\?\\UNC\\" + root_path[2:]
++ root_path = "\\\\?\\UNC\\" + root_path[2:]
+ else:
+- root_path = u"\\\\?" + root_path
++ root_path = "\\\\?" + root_path
+ # If it points at the root of a drive, it needs a trailing slash.
+ if len(root_path) == 6 and not root_path.endswith("\\"):
+ root_path = root_path + "\\"
+@@ -155,16 +155,16 @@ class OSFS(OSFSXAttrMixin, OSFSWatchMixin, FS):
+ return "<OSFS: %r>" % self.root_path
+
+ def __unicode__(self):
+- return u"<OSFS: %s>" % self.root_path
++ return "<OSFS: %s>" % self.root_path
+
+ def _decode_path(self, p):
+- if isinstance(p, unicode):
++ if isinstance(p, str):
+ return p
+ return p.decode(self.encoding, 'replace')
+
+ def getsyspath(self, path, allow_none=False):
+ self.validatepath(path)
+- path = relpath(normpath(path)).replace(u"/", os.sep)
++ path = relpath(normpath(path)).replace("/", os.sep)
+ path = os.path.join(self.root_path, path)
+ if not path.startswith(self.root_path):
+ raise PathError(path, msg="OSFS given path outside root: %(path)s")
+@@ -234,7 +234,7 @@ class OSFS(OSFSXAttrMixin, OSFSWatchMixin, FS):
+ encoding = encoding or 'utf-8'
+ try:
+ return io.open(sys_path, mode=mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline)
+- except EnvironmentError, e:
++ except EnvironmentError as e:
+ # Win32 gives EACCES when opening a directory.
+ if sys.platform == "win32" and e.errno in (errno.EACCES,):
+ if self.isdir(path):
+@@ -301,7 +301,7 @@ class OSFS(OSFSXAttrMixin, OSFSWatchMixin, FS):
+ sys_path = self.getsyspath(path)
+ try:
+ os.remove(sys_path)
+- except OSError, e:
++ except OSError as e:
+ if e.errno == errno.EACCES and sys.platform == "win32":
+ # sometimes windows says this for attempts to remove a dir
+ if os.path.isdir(sys_path):
+@@ -338,7 +338,7 @@ class OSFS(OSFSXAttrMixin, OSFSWatchMixin, FS):
+ path_dst = self.getsyspath(dst)
+ try:
+ os.rename(path_src, path_dst)
+- except OSError, e:
++ except OSError as e:
+ if e.errno:
+ # POSIX rename() can rename over an empty directory but gives
+ # ENOTEMPTY if the dir has contents. Raise UnsupportedError
+--- fs/osfs/watch_inotify.py.orig 2015-04-12 17:24:29 UTC
++++ fs/osfs/watch_inotify.py
+@@ -18,7 +18,7 @@ from fs.watch import *
+
+ try:
+ import pyinotify
+-except Exception, e:
++except Exception as e:
+ # pyinotify sometimes raises its own custom errors on import.
+ # How on earth are we supposed to catch them when we can't import them?
+ if isinstance(e,ImportError):
+@@ -39,7 +39,7 @@ class OSFSWatchMixin(WatchableFSMixin):
+ def close(self):
+ super(OSFSWatchMixin,self).close()
+ self.notify_watchers(CLOSED)
+- for watcher_list in self._watchers.values():
++ for watcher_list in list(self._watchers.values()):
+ for watcher in watcher_list:
+ self.del_watcher(watcher)
+ self.__watch_lock.acquire()
+@@ -58,7 +58,7 @@ class OSFSWatchMixin(WatchableFSMixin):
+ w = super_add_watcher(callback,path,events,recursive)
+ w._pyinotify_id = None
+ syspath = self.getsyspath(path)
+- if isinstance(syspath,unicode):
++ if isinstance(syspath,str):
+ syspath = syspath.encode(sys.getfilesystemencoding())
+ # Each watch gets its own WatchManager, since it's tricky to make
+ # a single WatchManager handle multiple callbacks with different
+@@ -73,7 +73,7 @@ class OSFSWatchMixin(WatchableFSMixin):
+ kwds = dict(rec=recursive,auto_add=recursive,quiet=False)
+ try:
+ wids = wm.add_watch(syspath,evtmask,process_events,**kwds)
+- except pyinotify.WatchManagerError, e:
++ except pyinotify.WatchManagerError as e:
+ raise OperationFailedError("add_watcher",details=e)
+ w._pyinotify_id = wids[syspath]
+ self.__watch_lock.acquire()
+@@ -239,7 +239,7 @@ class SharedThreadedNotifier(threading.Thread):
+ while self.running:
+ try:
+ ready_fds = self._poller.poll()
+- except _select_error, e:
++ except _select_error as e:
+ if e[0] != errno.EINTR:
+ raise
+ else:
+--- fs/osfs/watch_win32.py.orig 2015-04-12 17:24:29 UTC
++++ fs/osfs/watch_win32.py
+@@ -10,7 +10,7 @@ import os
+ import sys
+ import errno
+ import threading
+-import Queue
++import queue
+ import stat
+ import struct
+ import ctypes
+@@ -226,7 +226,7 @@ class WatchedDirectory(object):
+ ctypes.byref(self.result),len(self.result),
+ self.recursive,self.flags,None,
+ overlapped,None)
+- except WindowsError, e:
++ except WindowsError as e:
+ self.error = e
+ self.close()
+
+@@ -262,7 +262,7 @@ class WatchThread(threading.Thread):
+ self.watched_directories = {}
+ self.ready = threading.Event()
+ self._iocp = None
+- self._new_watches = Queue.Queue()
++ self._new_watches = queue.Queue()
+
+ def close(self):
+ if not self.closed:
+@@ -383,11 +383,11 @@ class WatchThread(threading.Thread):
+ hash(w),0)
+ w.post()
+ w.ready.set()
+- except Queue.Empty:
++ except queue.Empty:
+ pass
+ finally:
+ self.ready.set()
+- for w in self.watched_directories.itervalues():
++ for w in self.watched_directories.values():
+ w.close()
+ if self._iocp:
+ CloseHandle(self._iocp)
+--- fs/osfs/xattrs.py.orig 2022-03-04 17:14:43 UTC
++++ fs/osfs/xattrs.py
+@@ -49,7 +49,7 @@ if xattr is not None:
+
+ @convert_os_errors
+ def listxattrs(self, path):
+- return xattr.xattr(self.getsyspath(path)).keys()
++ return list(xattr.xattr(self.getsyspath(path)).keys())
+
+ else:
+
+--- fs/path.py.orig 2015-04-12 17:24:29 UTC
++++ fs/path.py
+@@ -44,7 +44,7 @@ def normpath(path):
+ if not _requires_normalization(path):
+ return path.rstrip('/')
+
+- prefix = u'/' if path.startswith('/') else u''
++ prefix = '/' if path.startswith('/') else ''
+ components = []
+ append = components.append
+ special = ('..', '.', '').__contains__
+@@ -60,7 +60,7 @@ def normpath(path):
+ # causing a circular import.
+ from fs.errors import BackReferenceError
+ raise BackReferenceError('Too many backrefs in \'%s\'' % path)
+- return prefix + u'/'.join(components)
++ return prefix + '/'.join(components)
+
+
+ if os.sep != '/':
+@@ -100,11 +100,11 @@ def recursepath(path, reverse=False):
+ """
+
+ if path in ('', '/'):
+- return [u'/']
++ return ['/']
+
+ path = abspath(normpath(path)) + '/'
+
+- paths = [u'/']
++ paths = ['/']
+ find = path.find
+ append = paths.append
+ pos = 1
+@@ -133,7 +133,7 @@ def abspath(path):
+
+ """
+ if not path.startswith('/'):
+- return u'/' + path
++ return '/' + path
+ return path
+
+
+@@ -176,7 +176,7 @@ def pathjoin(*paths):
+ absolute = True
+ relpaths.append(p)
+
+- path = normpath(u"/".join(relpaths))
++ path = normpath("/".join(relpaths))
+ if absolute:
+ path = abspath(path)
+ return path
+@@ -419,7 +419,7 @@ def relativefrom(base, path):
+ break
+ common += 1
+
+- return u'/'.join([u'..'] * (len(base) - common) + path[common:])
++ return '/'.join(['..'] * (len(base) - common) + path[common:])
+
+
+ class PathMap(object):
+@@ -559,7 +559,7 @@ class PathMap(object):
+ m = m[name]
+ except KeyError:
+ return
+- for (nm, subm) in m.iteritems():
++ for (nm, subm) in m.items():
+ if not nm:
+ yield abspath(root)
+ else:
+@@ -568,7 +568,7 @@ class PathMap(object):
+ yield subk
+
+ def __iter__(self):
+- return self.iterkeys()
++ return iter(self.keys())
+
+ def keys(self,root="/"):
+ return list(self.iterkeys(root))
+@@ -583,7 +583,7 @@ class PathMap(object):
+ m = m[name]
+ except KeyError:
+ return
+- for (nm, subm) in m.iteritems():
++ for (nm, subm) in m.items():
+ if not nm:
+ yield subm
+ else:
+@@ -604,7 +604,7 @@ class PathMap(object):
+ m = m[name]
+ except KeyError:
+ return
+- for (nm, subm) in m.iteritems():
++ for (nm, subm) in m.items():
+ if not nm:
+ yield (abspath(normpath(root)), subm)
+ else:
+@@ -627,7 +627,7 @@ class PathMap(object):
+ m = m[name]
+ except KeyError:
+ return
+- for (nm, subm) in m.iteritems():
++ for (nm, subm) in m.items():
+ if nm and subm:
+ yield nm
+
+@@ -651,9 +651,9 @@ def iswildcard(path):
+ return not _wild_chars.isdisjoint(path)
+
+ if __name__ == "__main__":
+- print recursepath('a/b/c')
++ print(recursepath('a/b/c'))
+
+- print relativefrom('/', '/foo')
+- print relativefrom('/foo/bar', '/foo/baz')
+- print relativefrom('/foo/bar/baz', '/foo/egg')
+- print relativefrom('/foo/bar/baz/egg', '/foo/egg')
++ print(relativefrom('/', '/foo'))
++ print(relativefrom('/foo/bar', '/foo/baz'))
++ print(relativefrom('/foo/bar/baz', '/foo/egg'))
++ print(relativefrom('/foo/bar/baz/egg', '/foo/egg'))
+--- fs/remote.py.orig 2015-04-12 17:24:29 UTC
++++ fs/remote.py
+@@ -20,8 +20,8 @@ FS subclasses interfacing with a remote filesystem. T
+
+ """
+
+-from __future__ import with_statement
+
++
+ import time
+ import stat as statinfo
+ from errno import EINVAL
+@@ -422,11 +422,11 @@ class CachedInfo(object):
+ self.has_full_children = other.has_full_children
+ @classmethod
+ def new_file_stub(cls):
+- info = {"info" : 0700 | statinfo.S_IFREG}
++ info = {"info" : 0o700 | statinfo.S_IFREG}
+ return cls(info,has_full_info=False)
+ @classmethod
+ def new_dir_stub(cls):
+- info = {"info" : 0700 | statinfo.S_IFDIR}
++ info = {"info" : 0o700 | statinfo.S_IFDIR}
+ return cls(info,has_full_info=False)
+
+
+@@ -512,7 +512,7 @@ class CacheFSMixin(FS):
+ if self.max_cache_size is not None and old_ci is None:
+ while self.__cache_size >= self.max_cache_size:
+ try:
+- to_del = iter(self.__cache).next()
++ to_del = next(iter(self.__cache))
+ except StopIteration:
+ break
+ else:
+@@ -592,7 +592,7 @@ class CacheFSMixin(FS):
+
+ def isdir(self, path):
+ try:
+- self.__cache.iternames(path).next()
++ next(self.__cache.iternames(path))
+ return True
+ except StopIteration:
+ pass
+@@ -607,7 +607,7 @@ class CacheFSMixin(FS):
+
+ def isfile(self, path):
+ try:
+- self.__cache.iternames(path).next()
++ next(self.__cache.iternames(path))
+ return False
+ except StopIteration:
+ pass
+--- fs/remotefs.py.orig 2015-04-12 17:24:29 UTC
++++ fs/remotefs.py
+@@ -1,5 +1,5 @@
+ # Work in Progress - Do not use
+-from __future__ import with_statement
++
+ from fs.base import FS
+ from fs.expose.serve import packetstream
+
+@@ -7,7 +7,7 @@ from collections import defaultdict
+ import threading
+ from threading import Lock, RLock
+ from json import dumps
+-import Queue as queue
++import queue
+ import socket
+
+ from six import b
+@@ -35,12 +35,12 @@ class PacketHandler(threading.Thread):
+ while True:
+ data = read(1024*16)
+ if not data:
+- print "No data"
++ print("No data")
+ break
+- print "data", repr(data)
++ print("data", repr(data))
+ for header, payload in decoder.feed(data):
+- print repr(header)
+- print repr(payload)
++ print(repr(header))
++ print(repr(payload))
+ on_packet(header, payload)
+
+ def _new_call_id(self):
+@@ -77,8 +77,8 @@ class PacketHandler(threading.Thread):
+
+ while True:
+ header, payload = queue.get()
+- print repr(header)
+- print repr(payload)
++ print(repr(header))
++ print(repr(payload))
+ if client_ref is not None and header.get('client_ref') != client_ref:
+ continue
+ break
+@@ -167,9 +167,9 @@ class RemoteFS(FS):
+ def ping(self, msg):
+ call_id = self.packet_handler.send_packet({'type':'rpc', 'method':'ping'}, msg)
+ header, payload = self.packet_handler.get_packet(call_id)
+- print "PING"
+- print header
+- print payload
++ print("PING")
++ print(header)
++ print(payload)
+
+ def close(self):
+ self.transport.close()
+--- fs/rpcfs.py.orig 2015-04-12 17:24:29 UTC
++++ fs/rpcfs.py
+@@ -8,7 +8,7 @@ class from the :mod:`fs.expose.xmlrpc` module.
+
+ """
+
+-import xmlrpclib
++import xmlrpc.client
+ import socket
+ import base64
+
+@@ -28,11 +28,11 @@ def re_raise_faults(func):
+ def wrapper(*args, **kwds):
+ try:
+ return func(*args, **kwds)
+- except (xmlrpclib.Fault), f:
++ except (xmlrpc.client.Fault) as f:
+ #raise
+ # Make sure it's in a form we can handle
+
+- print f.faultString
++ print(f.faultString)
+ bits = f.faultString.split(" ")
+ if bits[0] not in ["<type", "<class"]:
+ raise f
+@@ -41,7 +41,7 @@ def re_raise_faults(func):
+ cls = bits[0]
+ msg = ">:".join(bits[1:])
+ cls = cls.strip('\'')
+- print "-" + cls
++ print("-" + cls)
+ cls = _object_by_name(cls)
+ # Re-raise using the remainder of the fault code as message
+ if cls:
+@@ -50,7 +50,7 @@ def re_raise_faults(func):
+ else:
+ raise cls(msg)
+ raise f
+- except socket.error, e:
++ except socket.error as e:
+ raise RemoteConnectionError(str(e), details=e)
+ return wrapper
+
+@@ -126,9 +126,9 @@ class RPCFS(FS):
+ kwds = dict(allow_none=True, use_datetime=True)
+
+ if self._transport is not None:
+- proxy = xmlrpclib.ServerProxy(self.uri, self._transport, **kwds)
++ proxy = xmlrpc.client.ServerProxy(self.uri, self._transport, **kwds)
+ else:
+- proxy = xmlrpclib.ServerProxy(self.uri, **kwds)
++ proxy = xmlrpc.client.ServerProxy(self.uri, **kwds)
+
+ return ReRaiseFaults(proxy)
+
+@@ -170,7 +170,7 @@ class RPCFS(FS):
+ meta = self.proxy.getmeta(meta_name)
+ else:
+ meta = self.proxy.getmeta_default(meta_name, default)
+- if isinstance(meta, basestring):
++ if isinstance(meta, str):
+ # To allow transport of meta with invalid xml chars (like null)
+ meta = self.encode_path(meta)
+ return meta
+@@ -185,7 +185,7 @@ class RPCFS(FS):
+ # TODO: chunked transport of large files
+ epath = self.encode_path(path)
+ if "w" in mode:
+- self.proxy.set_contents(epath, xmlrpclib.Binary(b("")))
++ self.proxy.set_contents(epath, xmlrpc.client.Binary(b("")))
+ if "r" in mode or "a" in mode or "+" in mode:
+ try:
+ data = self.proxy.get_contents(epath, "rb").data
+@@ -194,7 +194,7 @@ class RPCFS(FS):
+ raise ResourceNotFoundError(path)
+ if not self.isdir(dirname(path)):
+ raise ParentDirectoryMissingError(path)
+- self.proxy.set_contents(path, xmlrpclib.Binary(b("")))
++ self.proxy.set_contents(path, xmlrpc.client.Binary(b("")))
+ else:
+ data = b("")
+ f = StringIO(data)
+@@ -210,7 +210,7 @@ class RPCFS(FS):
+ self._lock.acquire()
+ try:
+ oldflush()
+- self.proxy.set_contents(epath, xmlrpclib.Binary(f.getvalue()))
++ self.proxy.set_contents(epath, xmlrpc.client.Binary(f.getvalue()))
+ finally:
+ self._lock.release()
+
+--- fs/s3fs.py.orig 2015-11-13 16:37:26 UTC
++++ fs/s3fs.py
+@@ -41,7 +41,7 @@ else:
+ try:
+ return self._map[(threading.currentThread(),attr)]
+ except KeyError:
+- raise AttributeError, attr
++ raise AttributeError(attr)
+ def __setattr__(self,attr,value):
+ self._map[(threading.currentThread(),attr)] = value
+
+@@ -106,7 +106,7 @@ class S3FS(FS):
+ prefix = prefix[1:]
+ if not prefix.endswith(separator) and prefix != "":
+ prefix = prefix + separator
+- if isinstance(prefix,unicode):
++ if isinstance(prefix,str):
+ prefix = prefix.encode("utf8")
+ if aws_access_key is None:
+ if "AWS_ACCESS_KEY_ID" not in os.environ:
+@@ -149,7 +149,7 @@ class S3FS(FS):
+ b.get_key(self._prefix)
+ else:
+ b = self._s3conn.get_bucket(self._bucket_name, validate=1)
+- except S3ResponseError, e:
++ except S3ResponseError as e:
+ if "404 Not Found" not in str(e):
+ raise
+ b = self._s3conn.create_bucket(self._bucket_name)
+@@ -179,7 +179,7 @@ class S3FS(FS):
+ s3path = self._prefix + path
+ if s3path and s3path[-1] == self._separator:
+ s3path = s3path[:-1]
+- if isinstance(s3path,unicode):
++ if isinstance(s3path,str):
+ s3path = s3path.encode("utf8")
+ return s3path
+
+@@ -220,9 +220,9 @@ class S3FS(FS):
+
+ def _sync_set_contents(self,key,contents):
+ """Synchronously set the contents of a key."""
+- if isinstance(key,basestring):
++ if isinstance(key,str):
+ key = self._s3bukt.new_key(key)
+- if isinstance(contents,basestring):
++ if isinstance(contents,str):
+ key.set_contents_from_string(contents)
+ elif hasattr(contents,"md5"):
+ hexmd5 = contents.md5
+@@ -338,7 +338,7 @@ class S3FS(FS):
+ # the directory itself, which other tools may not create.
+ ks = self._s3bukt.list(prefix=s3path,delimiter=self._separator)
+ try:
+- iter(ks).next()
++ next(iter(ks))
+ except StopIteration:
+ return False
+ else:
+@@ -398,7 +398,7 @@ class S3FS(FS):
+ # Skip over the entry for the directory itself, if it exists
+ name = self._uns3path(k.name,s3path)
+ if name != "":
+- if not isinstance(name,unicode):
++ if not isinstance(name,str):
+ name = name.decode("utf8")
+ if name.endswith(self._separator):
+ name = name[:-1]
+@@ -572,14 +572,14 @@ class S3FS(FS):
+ else:
+ info["name"] = basename(self._uns3key(k.name))
+ if self._key_is_dir(key):
+- info["st_mode"] = 0700 | statinfo.S_IFDIR
++ info["st_mode"] = 0o700 | statinfo.S_IFDIR
+ else:
+- info["st_mode"] = 0700 | statinfo.S_IFREG
++ info["st_mode"] = 0o700 | statinfo.S_IFREG
+ if hasattr(key,"size"):
+ info['size'] = int(key.size)
+ etag = getattr(key,"etag",None)
+ if etag is not None:
+- if isinstance(etag,unicode):
++ if isinstance(etag,str):
+ etag = etag.encode("utf8")
+ info['etag'] = etag.strip('"').strip("'")
+ if hasattr(key,"last_modified"):
+@@ -632,7 +632,7 @@ class S3FS(FS):
+ s3path_src = self._s3path(src)
+ try:
+ self._s3bukt.copy_key(s3path_dst,self._bucket_name,s3path_src)
+- except S3ResponseError, e:
++ except S3ResponseError as e:
+ if "404 Not Found" in str(e):
+ msg = "Source is not a file: %(path)s"
+ raise ResourceInvalidError(src, msg=msg)
+@@ -663,7 +663,7 @@ class S3FS(FS):
+ for k in self._s3bukt.list(prefix=prefix):
+ name = relpath(self._uns3path(k.name,prefix))
+ if name != "":
+- if not isinstance(name,unicode):
++ if not isinstance(name,str):
+ name = name.decode("utf8")
+ if not k.name.endswith(self._separator):
+ if wildcard is not None:
+@@ -691,7 +691,7 @@ class S3FS(FS):
+ for k in self._s3bukt.list(prefix=prefix):
+ name = relpath(self._uns3path(k.name,prefix))
+ if name != "":
+- if not isinstance(name,unicode):
++ if not isinstance(name,str):
+ name = name.decode("utf8")
+ if wildcard is not None:
+ if callable(wildcard):
+@@ -718,7 +718,7 @@ class S3FS(FS):
+ for k in self._s3bukt.list(prefix=prefix):
+ name = relpath(self._uns3path(k.name,prefix))
+ if name != "":
+- if not isinstance(name,unicode):
++ if not isinstance(name,str):
+ name = name.decode("utf8")
+ if not k.name.endswith(self._separator):
+ if wildcard is not None:
+@@ -733,16 +733,16 @@ class S3FS(FS):
+
+
+ def _eq_utf8(name1,name2):
+- if isinstance(name1,unicode):
++ if isinstance(name1,str):
+ name1 = name1.encode("utf8")
+- if isinstance(name2,unicode):
++ if isinstance(name2,str):
+ name2 = name2.encode("utf8")
+ return name1 == name2
+
+ def _startswith_utf8(name1,name2):
+- if isinstance(name1,unicode):
++ if isinstance(name1,str):
+ name1 = name1.encode("utf8")
+- if isinstance(name2,unicode):
++ if isinstance(name2,str):
+ name2 = name2.encode("utf8")
+ return name1.startswith(name2)
+
+--- fs/sftpfs.py.orig 2015-04-12 17:24:29 UTC
++++ fs/sftpfs.py
+@@ -131,7 +131,7 @@ class SFTPFS(FS):
+ self._client = None
+
+ self.hostname = None
+- if isinstance(connection, basestring):
++ if isinstance(connection, str):
+ self.hostname = connection
+ elif isinstance(connection, tuple):
+ self.hostname = '%s:%s' % connection
+@@ -183,7 +183,7 @@ class SFTPFS(FS):
+ if not connection.is_authenticated():
+ try:
+ connection.auth_none(username)
+- except paramiko.BadAuthenticationType, e:
++ except paramiko.BadAuthenticationType as e:
+ self.close()
+ allowed = ', '.join(e.allowed_types)
+ raise RemoteConnectionError(msg='no auth - server requires one of the following: %s' % allowed, details=e)
+@@ -192,14 +192,14 @@ class SFTPFS(FS):
+ self.close()
+ raise RemoteConnectionError(msg='no auth')
+
+- except paramiko.SSHException, e:
++ except paramiko.SSHException as e:
+ self.close()
+ raise RemoteConnectionError(msg='SSH exception (%s)' % str(e), details=e)
+
+ self._transport = connection
+
+ def __unicode__(self):
+- return u'<SFTPFS: %s>' % self.desc('/')
++ return '<SFTPFS: %s>' % self.desc('/')
+
+ @classmethod
+ def _agent_auth(cls, transport, username):
+@@ -307,7 +307,7 @@ class SFTPFS(FS):
+ self.closed = True
+
+ def _normpath(self, path):
+- if not isinstance(path, unicode):
++ if not isinstance(path, str):
+ path = path.decode(self.encoding)
+ npath = pathjoin(self.root_path, relpath(normpath(path)))
+ if not isprefix(self.root_path, npath):
+@@ -355,10 +355,10 @@ class SFTPFS(FS):
+ def desc(self, path):
+ npath = self._normpath(path)
+ if self.hostname:
+- return u'sftp://%s%s' % (self.hostname, path)
++ return 'sftp://%s%s' % (self.hostname, path)
+ else:
+ addr, port = self._transport.getpeername()
+- return u'sftp://%s:%i%s' % (addr, port, self.client.normalize(npath))
++ return 'sftp://%s:%i%s' % (addr, port, self.client.normalize(npath))
+
+ @synchronize
+ @convert_os_errors
+@@ -368,7 +368,7 @@ class SFTPFS(FS):
+ npath = self._normpath(path)
+ try:
+ self.client.stat(npath)
+- except IOError, e:
++ except IOError as e:
+ if getattr(e,"errno",None) == ENOENT:
+ return False
+ raise
+@@ -382,7 +382,7 @@ class SFTPFS(FS):
+ npath = self._normpath(path)
+ try:
+ stat = self.client.stat(npath)
+- except IOError, e:
++ except IOError as e:
+ if getattr(e,"errno",None) == ENOENT:
+ return False
+ raise
+@@ -394,7 +394,7 @@ class SFTPFS(FS):
+ npath = self._normpath(path)
+ try:
+ stat = self.client.stat(npath)
+- except IOError, e:
++ except IOError as e:
+ if getattr(e,"errno",None) == ENOENT:
+ return False
+ raise
+@@ -409,10 +409,10 @@ class SFTPFS(FS):
+ if dirs_only or files_only:
+ attrs = self.client.listdir_attr(npath)
+ attrs_map = dict((a.filename, a) for a in attrs)
+- paths = list(attrs_map.iterkeys())
++ paths = list(attrs_map.keys())
+ else:
+ paths = self.client.listdir(npath)
+- except IOError, e:
++ except IOError as e:
+ if getattr(e,"errno",None) == ENOENT:
+ if self.isfile(path):
+ raise ResourceInvalidError(path,msg="Can't list directory contents of a file: %(path)s")
+@@ -424,19 +424,19 @@ class SFTPFS(FS):
+ if attrs_map:
+ if dirs_only:
+ filter_paths = []
+- for apath, attr in attrs_map.iteritems():
++ for apath, attr in attrs_map.items():
+ if isdir(self, path, attr.__dict__):
+ filter_paths.append(apath)
+ paths = filter_paths
+ elif files_only:
+ filter_paths = []
+- for apath, attr in attrs_map.iteritems():
++ for apath, attr in attrs_map.items():
+ if isfile(self, apath, attr.__dict__):
+ filter_paths.append(apath)
+ paths = filter_paths
+
+ for (i,p) in enumerate(paths):
+- if not isinstance(p,unicode):
++ if not isinstance(p,str):
+ paths[i] = p.decode(self.encoding)
+
+ return self._listdir_helper(path, paths, wildcard, full, absolute, False, False)
+@@ -448,8 +448,8 @@ class SFTPFS(FS):
+ try:
+ attrs = self.client.listdir_attr(npath)
+ attrs_map = dict((a.filename, a) for a in attrs)
+- paths = attrs_map.keys()
+- except IOError, e:
++ paths = list(attrs_map.keys())
++ except IOError as e:
+ if getattr(e,"errno",None) == ENOENT:
+ if self.isfile(path):
+ raise ResourceInvalidError(path,msg="Can't list directory contents of a file: %(path)s")
+@@ -460,19 +460,19 @@ class SFTPFS(FS):
+
+ if dirs_only:
+ filter_paths = []
+- for path, attr in attrs_map.iteritems():
++ for path, attr in attrs_map.items():
+ if isdir(self, path, attr.__dict__):
+ filter_paths.append(path)
+ paths = filter_paths
+ elif files_only:
+ filter_paths = []
+- for path, attr in attrs_map.iteritems():
++ for path, attr in attrs_map.items():
+ if isfile(self, path, attr.__dict__):
+ filter_paths.append(path)
+ paths = filter_paths
+
+ for (i, p) in enumerate(paths):
+- if not isinstance(p, unicode):
++ if not isinstance(p, str):
+ paths[i] = p.decode(self.encoding)
+
+ def getinfo(p):
+@@ -491,7 +491,7 @@ class SFTPFS(FS):
+ npath = self._normpath(path)
+ try:
+ self.client.mkdir(npath)
+- except IOError, _e:
++ except IOError as _e:
+ # Error code is unreliable, try to figure out what went wrong
+ try:
+ stat = self.client.stat(npath)
+@@ -519,7 +519,7 @@ class SFTPFS(FS):
+ npath = self._normpath(path)
+ try:
+ self.client.remove(npath)
+- except IOError, e:
++ except IOError as e:
+ if getattr(e,"errno",None) == ENOENT:
+ raise ResourceNotFoundError(path)
+ elif self.isdir(path):
+@@ -542,7 +542,7 @@ class SFTPFS(FS):
+ raise ResourceNotFoundError(path)
+ try:
+ self.client.rmdir(npath)
+- except IOError, e:
++ except IOError as e:
+ if getattr(e,"errno",None) == ENOENT:
+ if self.isfile(path):
+ raise ResourceInvalidError(path,msg="Can't use removedir() on a file: %(path)s")
+@@ -565,7 +565,7 @@ class SFTPFS(FS):
+ ndst = self._normpath(dst)
+ try:
+ self.client.rename(nsrc,ndst)
+- except IOError, e:
++ except IOError as e:
+ if getattr(e,"errno",None) == ENOENT:
+ raise ResourceNotFoundError(src)
+ if not self.isdir(dirname(dst)):
+@@ -581,7 +581,7 @@ class SFTPFS(FS):
+ self.remove(dst)
+ try:
+ self.client.rename(nsrc,ndst)
+- except IOError, e:
++ except IOError as e:
+ if getattr(e,"errno",None) == ENOENT:
+ raise ResourceNotFoundError(src)
+ if self.exists(dst):
+@@ -599,7 +599,7 @@ class SFTPFS(FS):
+ self.removedir(dst)
+ try:
+ self.client.rename(nsrc,ndst)
+- except IOError, e:
++ except IOError as e:
+ if getattr(e,"errno",None) == ENOENT:
+ raise ResourceNotFoundError(src)
+ if self.exists(dst):
+@@ -612,7 +612,7 @@ class SFTPFS(FS):
+ @classmethod
+ def _extract_info(cls, stats):
+ fromtimestamp = datetime.datetime.fromtimestamp
+- info = dict((k, v) for k, v in stats.iteritems() if k in cls._info_vars and not k.startswith('_'))
++ info = dict((k, v) for k, v in stats.items() if k in cls._info_vars and not k.startswith('_'))
+ info['size'] = info['st_size']
+ ct = info.get('st_ctime')
+ if ct is not None:
+--- fs/tempfs.py.orig 2015-04-12 17:24:29 UTC
++++ fs/tempfs.py
+@@ -29,7 +29,7 @@ class TempFS(OSFS):
+ _meta['atomic.move'] = True
+ _meta['atomic.copy'] = True
+
+- def __init__(self, identifier=None, temp_dir=None, dir_mode=0700, thread_synchronize=_thread_synchronize_default):
++ def __init__(self, identifier=None, temp_dir=None, dir_mode=0o700, thread_synchronize=_thread_synchronize_default):
+ """Creates a temporary Filesystem
+
+ identifier -- A string that is included in the name of the temporary directory,
+@@ -49,7 +49,7 @@ class TempFS(OSFS):
+ __str__ = __repr__
+
+ def __unicode__(self):
+- return u'<TempFS: %s>' % self._temp_dir
++ return '<TempFS: %s>' % self._temp_dir
+
+ def __getstate__(self):
+ # If we are picking a TempFS, we want to preserve its contents,
+--- fs/tests/__init__.py.orig 2015-11-13 22:33:26 UTC
++++ fs/tests/__init__.py
+@@ -5,8 +5,8 @@
+
+ """
+
+-from __future__ import with_statement
+
++
+ # Send any output from the logging module to stdout, so it will
+ # be captured by nose and reported appropriately
+ import sys
+@@ -61,7 +61,7 @@ class FSTestCases(object):
+ self.assertEqual(self.fs.validatepath('.foo'), None)
+ self.assertEqual(self.fs.validatepath('foo'), None)
+ self.assertEqual(self.fs.validatepath('foo/bar'), None)
+- self.assert_(self.fs.isvalidpath('foo/bar'))
++ self.assertTrue(self.fs.isvalidpath('foo/bar'))
+
+ def test_tree(self):
+ """Test tree print"""
+@@ -79,8 +79,8 @@ class FSTestCases(object):
+ stupid_meta = 'thismetashouldnotexist!"r$$%^&&*()_+'
+ self.assertRaises(NoMetaError, self.fs.getmeta, stupid_meta)
+ self.assertFalse(self.fs.hasmeta(stupid_meta))
+- self.assertEquals(None, self.fs.getmeta(stupid_meta, None))
+- self.assertEquals(3.14, self.fs.getmeta(stupid_meta, 3.14))
++ self.assertEqual(None, self.fs.getmeta(stupid_meta, None))
++ self.assertEqual(3.14, self.fs.getmeta(stupid_meta, 3.14))
+ for meta_name in meta_names:
+ try:
+ meta = self.fs.getmeta(meta_name)
+@@ -101,15 +101,15 @@ class FSTestCases(object):
+ except NoSysPathError:
+ pass
+ else:
+- self.assertTrue(isinstance(syspath, unicode))
++ self.assertTrue(isinstance(syspath, str))
+ syspath = self.fs.getsyspath("/", allow_none=True)
+ if syspath is not None:
+- self.assertTrue(isinstance(syspath, unicode))
++ self.assertTrue(isinstance(syspath, str))
+
+ def test_debug(self):
+ str(self.fs)
+ repr(self.fs)
+- self.assert_(hasattr(self.fs, 'desc'))
++ self.assertTrue(hasattr(self.fs, 'desc'))
+
+ def test_open_on_directory(self):
+ self.fs.makedir("testdir")
+@@ -132,20 +132,20 @@ class FSTestCases(object):
+ f.close()
+ self.assertTrue(self.check("test1.txt"))
+ f = self.fs.open("test1.txt", "rb")
+- self.assertEquals(f.read(), b("testing"))
++ self.assertEqual(f.read(), b("testing"))
+ f.close()
+ f = self.fs.open("test1.txt", "wb")
+ f.write(b("test file overwrite"))
+ f.close()
+ self.assertTrue(self.check("test1.txt"))
+ f = self.fs.open("test1.txt", "rb")
+- self.assertEquals(f.read(), b("test file overwrite"))
++ self.assertEqual(f.read(), b("test file overwrite"))
+ f.close()
+
+ def test_createfile(self):
+ test = b('now with content')
+ self.fs.createfile("test.txt")
+- self.assert_(self.fs.exists("test.txt"))
++ self.assertTrue(self.fs.exists("test.txt"))
+ self.assertEqual(self.fs.getcontents("test.txt", "rb"), b(''))
+ self.fs.setcontents("test.txt", test)
+ self.fs.createfile("test.txt")
+@@ -163,36 +163,36 @@ class FSTestCases(object):
+ def test_setcontents(self):
+ # setcontents() should accept both a string...
+ self.fs.setcontents("hello", b("world"))
+- self.assertEquals(self.fs.getcontents("hello", "rb"), b("world"))
++ self.assertEqual(self.fs.getcontents("hello", "rb"), b("world"))
+ # ...and a file-like object
+ self.fs.setcontents("hello", StringIO(b("to you, good sir!")))
+- self.assertEquals(self.fs.getcontents(
++ self.assertEqual(self.fs.getcontents(
+ "hello", "rb"), b("to you, good sir!"))
+ # setcontents() should accept both a string...
+ self.fs.setcontents("hello", b("world"), chunk_size=2)
+- self.assertEquals(self.fs.getcontents("hello", "rb"), b("world"))
++ self.assertEqual(self.fs.getcontents("hello", "rb"), b("world"))
+ # ...and a file-like object
+ self.fs.setcontents("hello", StringIO(
+ b("to you, good sir!")), chunk_size=2)
+- self.assertEquals(self.fs.getcontents(
++ self.assertEqual(self.fs.getcontents(
+ "hello", "rb"), b("to you, good sir!"))
+ self.fs.setcontents("hello", b(""))
+- self.assertEquals(self.fs.getcontents("hello", "rb"), b(""))
++ self.assertEqual(self.fs.getcontents("hello", "rb"), b(""))
+
+ def test_setcontents_async(self):
+ # setcontents() should accept both a string...
+ self.fs.setcontents_async("hello", b("world")).wait()
+- self.assertEquals(self.fs.getcontents("hello", "rb"), b("world"))
++ self.assertEqual(self.fs.getcontents("hello", "rb"), b("world"))
+ # ...and a file-like object
+ self.fs.setcontents_async("hello", StringIO(
+ b("to you, good sir!"))).wait()
+- self.assertEquals(self.fs.getcontents("hello"), b("to you, good sir!"))
++ self.assertEqual(self.fs.getcontents("hello"), b("to you, good sir!"))
+ self.fs.setcontents_async("hello", b("world"), chunk_size=2).wait()
+- self.assertEquals(self.fs.getcontents("hello", "rb"), b("world"))
++ self.assertEqual(self.fs.getcontents("hello", "rb"), b("world"))
+ # ...and a file-like object
+ self.fs.setcontents_async("hello", StringIO(
+ b("to you, good sir!")), chunk_size=2).wait()
+- self.assertEquals(self.fs.getcontents(
++ self.assertEqual(self.fs.getcontents(
+ "hello", "rb"), b("to you, good sir!"))
+
+ def test_isdir_isfile(self):
+@@ -214,19 +214,19 @@ class FSTestCases(object):
+ def test_listdir(self):
+ def check_unicode(items):
+ for item in items:
+- self.assertTrue(isinstance(item, unicode))
+- self.fs.setcontents(u"a", b(''))
++ self.assertTrue(isinstance(item, str))
++ self.fs.setcontents("a", b(''))
+ self.fs.setcontents("b", b(''))
+ self.fs.setcontents("foo", b(''))
+ self.fs.setcontents("bar", b(''))
+ # Test listing of the root directory
+ d1 = self.fs.listdir()
+ self.assertEqual(len(d1), 4)
+- self.assertEqual(sorted(d1), [u"a", u"b", u"bar", u"foo"])
++ self.assertEqual(sorted(d1), ["a", "b", "bar", "foo"])
+ check_unicode(d1)
+ d1 = self.fs.listdir("")
+ self.assertEqual(len(d1), 4)
+- self.assertEqual(sorted(d1), [u"a", u"b", u"bar", u"foo"])
++ self.assertEqual(sorted(d1), ["a", "b", "bar", "foo"])
+ check_unicode(d1)
+ d1 = self.fs.listdir("/")
+ self.assertEqual(len(d1), 4)
+@@ -234,7 +234,7 @@ class FSTestCases(object):
+ # Test listing absolute paths
+ d2 = self.fs.listdir(absolute=True)
+ self.assertEqual(len(d2), 4)
+- self.assertEqual(sorted(d2), [u"/a", u"/b", u"/bar", u"/foo"])
++ self.assertEqual(sorted(d2), ["/a", "/b", "/bar", "/foo"])
+ check_unicode(d2)
+ # Create some deeper subdirectories, to make sure their
+ # contents are not inadvertantly included
+@@ -248,25 +248,25 @@ class FSTestCases(object):
+ dirs_only = self.fs.listdir(dirs_only=True)
+ files_only = self.fs.listdir(files_only=True)
+ contains_a = self.fs.listdir(wildcard="*a*")
+- self.assertEqual(sorted(dirs_only), [u"p", u"q"])
+- self.assertEqual(sorted(files_only), [u"a", u"b", u"bar", u"foo"])
+- self.assertEqual(sorted(contains_a), [u"a", u"bar"])
++ self.assertEqual(sorted(dirs_only), ["p", "q"])
++ self.assertEqual(sorted(files_only), ["a", "b", "bar", "foo"])
++ self.assertEqual(sorted(contains_a), ["a", "bar"])
+ check_unicode(dirs_only)
+ check_unicode(files_only)
+ check_unicode(contains_a)
+ # Test listing a subdirectory
+ d3 = self.fs.listdir("p/1/2/3")
+ self.assertEqual(len(d3), 4)
+- self.assertEqual(sorted(d3), [u"a", u"b", u"bar", u"foo"])
++ self.assertEqual(sorted(d3), ["a", "b", "bar", "foo"])
+ check_unicode(d3)
+ # Test listing a subdirectory with absoliute and full paths
+ d4 = self.fs.listdir("p/1/2/3", absolute=True)
+ self.assertEqual(len(d4), 4)
+- self.assertEqual(sorted(d4), [u"/p/1/2/3/a", u"/p/1/2/3/b", u"/p/1/2/3/bar", u"/p/1/2/3/foo"])
++ self.assertEqual(sorted(d4), ["/p/1/2/3/a", "/p/1/2/3/b", "/p/1/2/3/bar", "/p/1/2/3/foo"])
+ check_unicode(d4)
+ d4 = self.fs.listdir("p/1/2/3", full=True)
+ self.assertEqual(len(d4), 4)
+- self.assertEqual(sorted(d4), [u"p/1/2/3/a", u"p/1/2/3/b", u"p/1/2/3/bar", u"p/1/2/3/foo"])
++ self.assertEqual(sorted(d4), ["p/1/2/3/a", "p/1/2/3/b", "p/1/2/3/bar", "p/1/2/3/foo"])
+ check_unicode(d4)
+ # Test that appropriate errors are raised
+ self.assertRaises(ResourceNotFoundError, self.fs.listdir, "zebra")
+@@ -275,32 +275,32 @@ class FSTestCases(object):
+ def test_listdirinfo(self):
+ def check_unicode(items):
+ for (nm, info) in items:
+- self.assertTrue(isinstance(nm, unicode))
++ self.assertTrue(isinstance(nm, str))
+
+ def check_equal(items, target):
+ names = [nm for (nm, info) in items]
+ self.assertEqual(sorted(names), sorted(target))
+- self.fs.setcontents(u"a", b(''))
++ self.fs.setcontents("a", b(''))
+ self.fs.setcontents("b", b(''))
+ self.fs.setcontents("foo", b(''))
+ self.fs.setcontents("bar", b(''))
+ # Test listing of the root directory
+ d1 = self.fs.listdirinfo()
+ self.assertEqual(len(d1), 4)
+- check_equal(d1, [u"a", u"b", u"bar", u"foo"])
++ check_equal(d1, ["a", "b", "bar", "foo"])
+ check_unicode(d1)
+ d1 = self.fs.listdirinfo("")
+ self.assertEqual(len(d1), 4)
+- check_equal(d1, [u"a", u"b", u"bar", u"foo"])
++ check_equal(d1, ["a", "b", "bar", "foo"])
+ check_unicode(d1)
+ d1 = self.fs.listdirinfo("/")
+ self.assertEqual(len(d1), 4)
+- check_equal(d1, [u"a", u"b", u"bar", u"foo"])
++ check_equal(d1, ["a", "b", "bar", "foo"])
+ check_unicode(d1)
+ # Test listing absolute paths
+ d2 = self.fs.listdirinfo(absolute=True)
+ self.assertEqual(len(d2), 4)
+- check_equal(d2, [u"/a", u"/b", u"/bar", u"/foo"])
++ check_equal(d2, ["/a", "/b", "/bar", "/foo"])
+ check_unicode(d2)
+ # Create some deeper subdirectories, to make sure their
+ # contents are not inadvertantly included
+@@ -314,25 +314,25 @@ class FSTestCases(object):
+ dirs_only = self.fs.listdirinfo(dirs_only=True)
+ files_only = self.fs.listdirinfo(files_only=True)
+ contains_a = self.fs.listdirinfo(wildcard="*a*")
+- check_equal(dirs_only, [u"p", u"q"])
+- check_equal(files_only, [u"a", u"b", u"bar", u"foo"])
+- check_equal(contains_a, [u"a", u"bar"])
++ check_equal(dirs_only, ["p", "q"])
++ check_equal(files_only, ["a", "b", "bar", "foo"])
++ check_equal(contains_a, ["a", "bar"])
+ check_unicode(dirs_only)
+ check_unicode(files_only)
+ check_unicode(contains_a)
+ # Test listing a subdirectory
+ d3 = self.fs.listdirinfo("p/1/2/3")
+ self.assertEqual(len(d3), 4)
+- check_equal(d3, [u"a", u"b", u"bar", u"foo"])
++ check_equal(d3, ["a", "b", "bar", "foo"])
+ check_unicode(d3)
+ # Test listing a subdirectory with absoliute and full paths
+ d4 = self.fs.listdirinfo("p/1/2/3", absolute=True)
+ self.assertEqual(len(d4), 4)
+- check_equal(d4, [u"/p/1/2/3/a", u"/p/1/2/3/b", u"/p/1/2/3/bar", u"/p/1/2/3/foo"])
++ check_equal(d4, ["/p/1/2/3/a", "/p/1/2/3/b", "/p/1/2/3/bar", "/p/1/2/3/foo"])
+ check_unicode(d4)
+ d4 = self.fs.listdirinfo("p/1/2/3", full=True)
+ self.assertEqual(len(d4), 4)
+- check_equal(d4, [u"p/1/2/3/a", u"p/1/2/3/b", u"p/1/2/3/bar", u"p/1/2/3/foo"])
++ check_equal(d4, ["p/1/2/3/a", "p/1/2/3/b", "p/1/2/3/bar", "p/1/2/3/foo"])
+ check_unicode(d4)
+ # Test that appropriate errors are raised
+ self.assertRaises(ResourceNotFoundError, self.fs.listdirinfo, "zebra")
+@@ -343,7 +343,7 @@ class FSTestCases(object):
+ self.fs.setcontents('b.txt', b('world'))
+ self.fs.makeopendir('foo').setcontents('c', b('123'))
+ sorted_walk = sorted([(d, sorted(fs)) for (d, fs) in self.fs.walk()])
+- self.assertEquals(sorted_walk,
++ self.assertEqual(sorted_walk,
+ [("/", ["a.txt", "b.txt"]),
+ ("/foo", ["c"])])
+ # When searching breadth-first, shallow entries come first
+@@ -371,10 +371,10 @@ class FSTestCases(object):
+ self.fs.makeopendir('.svn').setcontents('ignored', b(''))
+ for dir_path, paths in self.fs.walk(wildcard='*.txt'):
+ for path in paths:
+- self.assert_(path.endswith('.txt'))
++ self.assertTrue(path.endswith('.txt'))
+ for dir_path, paths in self.fs.walk(wildcard=lambda fn: fn.endswith('.txt')):
+ for path in paths:
+- self.assert_(path.endswith('.txt'))
++ self.assertTrue(path.endswith('.txt'))
+
+ def test_walk_dir_wildcard(self):
+ self.fs.setcontents('a.txt', b('hello'))
+@@ -383,35 +383,35 @@ class FSTestCases(object):
+ self.fs.makeopendir('.svn').setcontents('ignored', b(''))
+ for dir_path, paths in self.fs.walk(dir_wildcard=lambda fn: not fn.endswith('.svn')):
+ for path in paths:
+- self.assert_('.svn' not in path)
++ self.assertTrue('.svn' not in path)
+
+ def test_walkfiles(self):
+ self.fs.makeopendir('bar').setcontents('a.txt', b('123'))
+ self.fs.makeopendir('foo').setcontents('b', b('123'))
+- self.assertEquals(sorted(
++ self.assertEqual(sorted(
+ self.fs.walkfiles()), ["/bar/a.txt", "/foo/b"])
+- self.assertEquals(sorted(self.fs.walkfiles(
++ self.assertEqual(sorted(self.fs.walkfiles(
+ dir_wildcard="*foo*")), ["/foo/b"])
+- self.assertEquals(sorted(self.fs.walkfiles(
++ self.assertEqual(sorted(self.fs.walkfiles(
+ wildcard="*.txt")), ["/bar/a.txt"])
+
+ def test_walkdirs(self):
+ self.fs.makeopendir('bar').setcontents('a.txt', b('123'))
+ self.fs.makeopendir('foo').makeopendir(
+ "baz").setcontents('b', b('123'))
+- self.assertEquals(sorted(self.fs.walkdirs()), [
++ self.assertEqual(sorted(self.fs.walkdirs()), [
+ "/", "/bar", "/foo", "/foo/baz"])
+- self.assertEquals(sorted(self.fs.walkdirs(
++ self.assertEqual(sorted(self.fs.walkdirs(
+ wildcard="*foo*")), ["/", "/foo", "/foo/baz"])
+
+ def test_unicode(self):
+- alpha = u"\N{GREEK SMALL LETTER ALPHA}"
+- beta = u"\N{GREEK SMALL LETTER BETA}"
++ alpha = "\N{GREEK SMALL LETTER ALPHA}"
++ beta = "\N{GREEK SMALL LETTER BETA}"
+ self.fs.makedir(alpha)
+ self.fs.setcontents(alpha + "/a", b(''))
+ self.fs.setcontents(alpha + "/" + beta, b(''))
+ self.assertTrue(self.check(alpha))
+- self.assertEquals(sorted(self.fs.listdir(alpha)), ["a", beta])
++ self.assertEqual(sorted(self.fs.listdir(alpha)), ["a", beta])
+
+ def test_makedir(self):
+ check = self.check
+@@ -420,11 +420,11 @@ class FSTestCases(object):
+ self.assertRaises(
+ ParentDirectoryMissingError, self.fs.makedir, "a/b/c")
+ self.fs.makedir("a/b/c", recursive=True)
+- self.assert_(check("a/b/c"))
++ self.assertTrue(check("a/b/c"))
+ self.fs.makedir("foo/bar/baz", recursive=True)
+- self.assert_(check("foo/bar/baz"))
++ self.assertTrue(check("foo/bar/baz"))
+ self.fs.makedir("a/b/child")
+- self.assert_(check("a/b/child"))
++ self.assertTrue(check("a/b/child"))
+ self.assertRaises(DestinationExistsError, self.fs.makedir, "/a/b")
+ self.fs.makedir("/a/b", allow_recreate=True)
+ self.fs.setcontents("/a/file", b(''))
+@@ -446,30 +446,30 @@ class FSTestCases(object):
+ def test_removedir(self):
+ check = self.check
+ self.fs.makedir("a")
+- self.assert_(check("a"))
++ self.assertTrue(check("a"))
+ self.fs.removedir("a")
+ self.assertRaises(ResourceNotFoundError, self.fs.removedir, "a")
+- self.assert_(not check("a"))
++ self.assertTrue(not check("a"))
+ self.fs.makedir("a/b/c/d", recursive=True)
+ self.assertRaises(DirectoryNotEmptyError, self.fs.removedir, "a/b")
+ self.fs.removedir("a/b/c/d")
+- self.assert_(not check("a/b/c/d"))
++ self.assertTrue(not check("a/b/c/d"))
+ self.fs.removedir("a/b/c")
+- self.assert_(not check("a/b/c"))
++ self.assertTrue(not check("a/b/c"))
+ self.fs.removedir("a/b")
+- self.assert_(not check("a/b"))
++ self.assertTrue(not check("a/b"))
+ # Test recursive removal of empty parent dirs
+ self.fs.makedir("foo/bar/baz", recursive=True)
+ self.fs.removedir("foo/bar/baz", recursive=True)
+- self.assert_(not check("foo/bar/baz"))
+- self.assert_(not check("foo/bar"))
+- self.assert_(not check("foo"))
++ self.assertTrue(not check("foo/bar/baz"))
++ self.assertTrue(not check("foo/bar"))
++ self.assertTrue(not check("foo"))
+ self.fs.makedir("foo/bar/baz", recursive=True)
+ self.fs.setcontents("foo/file.txt", b("please don't delete me"))
+ self.fs.removedir("foo/bar/baz", recursive=True)
+- self.assert_(not check("foo/bar/baz"))
+- self.assert_(not check("foo/bar"))
+- self.assert_(check("foo/file.txt"))
++ self.assertTrue(not check("foo/bar/baz"))
++ self.assertTrue(not check("foo/bar"))
++ self.assertTrue(check("foo/file.txt"))
+ # Ensure that force=True works as expected
+ self.fs.makedir("frollic/waggle", recursive=True)
+ self.fs.setcontents("frollic/waddle.txt", b("waddlewaddlewaddle"))
+@@ -477,41 +477,41 @@ class FSTestCases(object):
+ self.assertRaises(
+ ResourceInvalidError, self.fs.removedir, "frollic/waddle.txt")
+ self.fs.removedir("frollic", force=True)
+- self.assert_(not check("frollic"))
++ self.assertTrue(not check("frollic"))
+ # Test removing unicode dirs
+- kappa = u"\N{GREEK CAPITAL LETTER KAPPA}"
++ kappa = "\N{GREEK CAPITAL LETTER KAPPA}"
+ self.fs.makedir(kappa)
+- self.assert_(self.fs.isdir(kappa))
++ self.assertTrue(self.fs.isdir(kappa))
+ self.fs.removedir(kappa)
+ self.assertRaises(ResourceNotFoundError, self.fs.removedir, kappa)
+- self.assert_(not self.fs.isdir(kappa))
++ self.assertTrue(not self.fs.isdir(kappa))
+ self.fs.makedir(pathjoin("test", kappa), recursive=True)
+- self.assert_(check(pathjoin("test", kappa)))
++ self.assertTrue(check(pathjoin("test", kappa)))
+ self.fs.removedir("test", force=True)
+- self.assert_(not check("test"))
++ self.assertTrue(not check("test"))
+
+ def test_rename(self):
+ check = self.check
+ # test renaming a file in the same directory
+ self.fs.setcontents("foo.txt", b("Hello, World!"))
+- self.assert_(check("foo.txt"))
++ self.assertTrue(check("foo.txt"))
+ self.fs.rename("foo.txt", "bar.txt")
+- self.assert_(check("bar.txt"))
+- self.assert_(not check("foo.txt"))
++ self.assertTrue(check("bar.txt"))
++ self.assertTrue(not check("foo.txt"))
+ # test renaming a directory in the same directory
+ self.fs.makedir("dir_a")
+ self.fs.setcontents("dir_a/test.txt", b("testerific"))
+- self.assert_(check("dir_a"))
++ self.assertTrue(check("dir_a"))
+ self.fs.rename("dir_a", "dir_b")
+- self.assert_(check("dir_b"))
+- self.assert_(check("dir_b/test.txt"))
+- self.assert_(not check("dir_a/test.txt"))
+- self.assert_(not check("dir_a"))
++ self.assertTrue(check("dir_b"))
++ self.assertTrue(check("dir_b/test.txt"))
++ self.assertTrue(not check("dir_a/test.txt"))
++ self.assertTrue(not check("dir_a"))
+ # test renaming a file into a different directory
+ self.fs.makedir("dir_a")
+ self.fs.rename("dir_b/test.txt", "dir_a/test.txt")
+- self.assert_(not check("dir_b/test.txt"))
+- self.assert_(check("dir_a/test.txt"))
++ self.assertTrue(not check("dir_b/test.txt"))
++ self.assertTrue(check("dir_a/test.txt"))
+ # test renaming a file into a non-existent directory
+ self.assertRaises(ParentDirectoryMissingError,
+ self.fs.rename, "dir_a/test.txt", "nonexistent/test.txt")
+@@ -530,7 +530,7 @@ class FSTestCases(object):
+ test_str = b("Hello, World!")
+ self.fs.setcontents("info.txt", test_str)
+ info = self.fs.getinfo("info.txt")
+- for k, v in info.iteritems():
++ for k, v in info.items():
+ self.assertEqual(self.fs.getinfokeys('info.txt', k), {k: v})
+
+ test_info = {}
+@@ -562,26 +562,26 @@ class FSTestCases(object):
+
+ self.fs.makedir("foo/bar", recursive=True)
+ makefile("foo/bar/a.txt")
+- self.assert_(check("foo/bar/a.txt"))
+- self.assert_(checkcontents("foo/bar/a.txt"))
++ self.assertTrue(check("foo/bar/a.txt"))
++ self.assertTrue(checkcontents("foo/bar/a.txt"))
+ self.fs.move("foo/bar/a.txt", "foo/b.txt")
+- self.assert_(not check("foo/bar/a.txt"))
+- self.assert_(check("foo/b.txt"))
+- self.assert_(checkcontents("foo/b.txt"))
++ self.assertTrue(not check("foo/bar/a.txt"))
++ self.assertTrue(check("foo/b.txt"))
++ self.assertTrue(checkcontents("foo/b.txt"))
+
+ self.fs.move("foo/b.txt", "c.txt")
+- self.assert_(not check("foo/b.txt"))
+- self.assert_(check("/c.txt"))
+- self.assert_(checkcontents("/c.txt"))
++ self.assertTrue(not check("foo/b.txt"))
++ self.assertTrue(check("/c.txt"))
++ self.assertTrue(checkcontents("/c.txt"))
+
+ makefile("foo/bar/a.txt")
+ self.assertRaises(
+ DestinationExistsError, self.fs.move, "foo/bar/a.txt", "/c.txt")
+- self.assert_(check("foo/bar/a.txt"))
+- self.assert_(check("/c.txt"))
++ self.assertTrue(check("foo/bar/a.txt"))
++ self.assertTrue(check("/c.txt"))
+ self.fs.move("foo/bar/a.txt", "/c.txt", overwrite=True)
+- self.assert_(not check("foo/bar/a.txt"))
+- self.assert_(check("/c.txt"))
++ self.assertTrue(not check("foo/bar/a.txt"))
++ self.assertTrue(check("/c.txt"))
+
+ def test_movedir(self):
+ check = self.check
+@@ -602,29 +602,29 @@ class FSTestCases(object):
+
+ self.fs.movedir("a", "copy of a")
+
+- self.assert_(self.fs.isdir("copy of a"))
+- self.assert_(check("copy of a/1.txt"))
+- self.assert_(check("copy of a/2.txt"))
+- self.assert_(check("copy of a/3.txt"))
+- self.assert_(check("copy of a/foo/bar/baz.txt"))
++ self.assertTrue(self.fs.isdir("copy of a"))
++ self.assertTrue(check("copy of a/1.txt"))
++ self.assertTrue(check("copy of a/2.txt"))
++ self.assertTrue(check("copy of a/3.txt"))
++ self.assertTrue(check("copy of a/foo/bar/baz.txt"))
+
+- self.assert_(not check("a/1.txt"))
+- self.assert_(not check("a/2.txt"))
+- self.assert_(not check("a/3.txt"))
+- self.assert_(not check("a/foo/bar/baz.txt"))
+- self.assert_(not check("a/foo/bar"))
+- self.assert_(not check("a/foo"))
+- self.assert_(not check("a"))
++ self.assertTrue(not check("a/1.txt"))
++ self.assertTrue(not check("a/2.txt"))
++ self.assertTrue(not check("a/3.txt"))
++ self.assertTrue(not check("a/foo/bar/baz.txt"))
++ self.assertTrue(not check("a/foo/bar"))
++ self.assertTrue(not check("a/foo"))
++ self.assertTrue(not check("a"))
+
+ self.fs.makedir("a")
+ self.assertRaises(
+ DestinationExistsError, self.fs.movedir, "copy of a", "a")
+ self.fs.movedir("copy of a", "a", overwrite=True)
+- self.assert_(not check("copy of a"))
+- self.assert_(check("a/1.txt"))
+- self.assert_(check("a/2.txt"))
+- self.assert_(check("a/3.txt"))
+- self.assert_(check("a/foo/bar/baz.txt"))
++ self.assertTrue(not check("copy of a"))
++ self.assertTrue(check("a/1.txt"))
++ self.assertTrue(check("a/2.txt"))
++ self.assertTrue(check("a/3.txt"))
++ self.assertTrue(check("a/foo/bar/baz.txt"))
+
+ def test_cant_copy_from_os(self):
+ sys_executable = os.path.abspath(os.path.realpath(sys.executable))
+@@ -645,28 +645,28 @@ class FSTestCases(object):
+
+ self.fs.makedir("foo/bar", recursive=True)
+ makefile("foo/bar/a.txt")
+- self.assert_(check("foo/bar/a.txt"))
+- self.assert_(checkcontents("foo/bar/a.txt"))
++ self.assertTrue(check("foo/bar/a.txt"))
++ self.assertTrue(checkcontents("foo/bar/a.txt"))
+ # import rpdb2; rpdb2.start_embedded_debugger('password');
+ self.fs.copy("foo/bar/a.txt", "foo/b.txt")
+- self.assert_(check("foo/bar/a.txt"))
+- self.assert_(check("foo/b.txt"))
+- self.assert_(checkcontents("foo/bar/a.txt"))
+- self.assert_(checkcontents("foo/b.txt"))
++ self.assertTrue(check("foo/bar/a.txt"))
++ self.assertTrue(check("foo/b.txt"))
++ self.assertTrue(checkcontents("foo/bar/a.txt"))
++ self.assertTrue(checkcontents("foo/b.txt"))
+
+ self.fs.copy("foo/b.txt", "c.txt")
+- self.assert_(check("foo/b.txt"))
+- self.assert_(check("/c.txt"))
+- self.assert_(checkcontents("/c.txt"))
++ self.assertTrue(check("foo/b.txt"))
++ self.assertTrue(check("/c.txt"))
++ self.assertTrue(checkcontents("/c.txt"))
+
+ makefile("foo/bar/a.txt", b("different contents"))
+- self.assert_(checkcontents("foo/bar/a.txt", b("different contents")))
++ self.assertTrue(checkcontents("foo/bar/a.txt", b("different contents")))
+ self.assertRaises(
+ DestinationExistsError, self.fs.copy, "foo/bar/a.txt", "/c.txt")
+- self.assert_(checkcontents("/c.txt"))
++ self.assertTrue(checkcontents("/c.txt"))
+ self.fs.copy("foo/bar/a.txt", "/c.txt", overwrite=True)
+- self.assert_(checkcontents("foo/bar/a.txt", b("different contents")))
+- self.assert_(checkcontents("/c.txt", b("different contents")))
++ self.assertTrue(checkcontents("foo/bar/a.txt", b("different contents")))
++ self.assertTrue(checkcontents("/c.txt", b("different contents")))
+
+ def test_copydir(self):
+ check = self.check
+@@ -690,24 +690,24 @@ class FSTestCases(object):
+ makefile("a/foo/bar/baz.txt")
+
+ self.fs.copydir("a", "copy of a")
+- self.assert_(check("copy of a/1.txt"))
+- self.assert_(check("copy of a/2.txt"))
+- self.assert_(check("copy of a/3.txt"))
+- self.assert_(check("copy of a/foo/bar/baz.txt"))
++ self.assertTrue(check("copy of a/1.txt"))
++ self.assertTrue(check("copy of a/2.txt"))
++ self.assertTrue(check("copy of a/3.txt"))
++ self.assertTrue(check("copy of a/foo/bar/baz.txt"))
+ checkcontents("copy of a/1.txt")
+
+- self.assert_(check("a/1.txt"))
+- self.assert_(check("a/2.txt"))
+- self.assert_(check("a/3.txt"))
+- self.assert_(check("a/foo/bar/baz.txt"))
++ self.assertTrue(check("a/1.txt"))
++ self.assertTrue(check("a/2.txt"))
++ self.assertTrue(check("a/3.txt"))
++ self.assertTrue(check("a/foo/bar/baz.txt"))
+ checkcontents("a/1.txt")
+
+ self.assertRaises(DestinationExistsError, self.fs.copydir, "a", "b")
+ self.fs.copydir("a", "b", overwrite=True)
+- self.assert_(check("b/1.txt"))
+- self.assert_(check("b/2.txt"))
+- self.assert_(check("b/3.txt"))
+- self.assert_(check("b/foo/bar/baz.txt"))
++ self.assertTrue(check("b/1.txt"))
++ self.assertTrue(check("b/2.txt"))
++ self.assertTrue(check("b/3.txt"))
++ self.assertTrue(check("b/foo/bar/baz.txt"))
+ checkcontents("b/1.txt")
+
+ def test_copydir_with_dotfile(self):
+@@ -724,13 +724,13 @@ class FSTestCases(object):
+ makefile("a/.hidden.txt")
+
+ self.fs.copydir("a", "copy of a")
+- self.assert_(check("copy of a/1.txt"))
+- self.assert_(check("copy of a/2.txt"))
+- self.assert_(check("copy of a/.hidden.txt"))
++ self.assertTrue(check("copy of a/1.txt"))
++ self.assertTrue(check("copy of a/2.txt"))
++ self.assertTrue(check("copy of a/.hidden.txt"))
+
+- self.assert_(check("a/1.txt"))
+- self.assert_(check("a/2.txt"))
+- self.assert_(check("a/.hidden.txt"))
++ self.assertTrue(check("a/1.txt"))
++ self.assertTrue(check("a/2.txt"))
++ self.assertTrue(check("a/.hidden.txt"))
+
+ def test_readwriteappendseek(self):
+ def checkcontents(path, check_contents):
+@@ -743,7 +743,7 @@ class FSTestCases(object):
+ all_strings = b("").join(test_strings)
+
+ self.assertRaises(ResourceNotFoundError, self.fs.open, "a.txt", "r")
+- self.assert_(not self.fs.exists("a.txt"))
++ self.assertTrue(not self.fs.exists("a.txt"))
+ f1 = self.fs.open("a.txt", "wb")
+ pos = 0
+ for s in test_strings:
+@@ -751,26 +751,26 @@ class FSTestCases(object):
+ pos += len(s)
+ self.assertEqual(pos, f1.tell())
+ f1.close()
+- self.assert_(self.fs.exists("a.txt"))
+- self.assert_(checkcontents("a.txt", all_strings))
++ self.assertTrue(self.fs.exists("a.txt"))
++ self.assertTrue(checkcontents("a.txt", all_strings))
+
+ f2 = self.fs.open("b.txt", "wb")
+ f2.write(test_strings[0])
+ f2.close()
+- self.assert_(checkcontents("b.txt", test_strings[0]))
++ self.assertTrue(checkcontents("b.txt", test_strings[0]))
+ f3 = self.fs.open("b.txt", "ab")
+ # On win32, tell() gives zero until you actually write to the file
+ # self.assertEquals(f3.tell(),len(test_strings[0]))
+ f3.write(test_strings[1])
+- self.assertEquals(f3.tell(), len(test_strings[0])+len(test_strings[1]))
++ self.assertEqual(f3.tell(), len(test_strings[0])+len(test_strings[1]))
+ f3.write(test_strings[2])
+- self.assertEquals(f3.tell(), len(all_strings))
++ self.assertEqual(f3.tell(), len(all_strings))
+ f3.close()
+- self.assert_(checkcontents("b.txt", all_strings))
++ self.assertTrue(checkcontents("b.txt", all_strings))
+ f4 = self.fs.open("b.txt", "wb")
+ f4.write(test_strings[2])
+ f4.close()
+- self.assert_(checkcontents("b.txt", test_strings[2]))
++ self.assertTrue(checkcontents("b.txt", test_strings[2]))
+ f5 = self.fs.open("c.txt", "wb")
+ for s in test_strings:
+ f5.write(s+b("\n"))
+@@ -815,7 +815,7 @@ class FSTestCases(object):
+ with self.fs.open("hello", "wb") as f:
+ f.truncate(30)
+
+- self.assertEquals(self.fs.getsize("hello"), 30)
++ self.assertEqual(self.fs.getsize("hello"), 30)
+
+ # Some file systems (FTPFS) don't support both reading and writing
+ if self.fs.getmeta('file.read_and_write', True):
+@@ -825,7 +825,7 @@ class FSTestCases(object):
+
+ with self.fs.open("hello", "rb") as f:
+ f.seek(25)
+- self.assertEquals(f.read(), b("123456"))
++ self.assertEqual(f.read(), b("123456"))
+
+ def test_write_past_end_of_file(self):
+ if self.fs.getmeta('file.read_and_write', True):
+@@ -833,7 +833,7 @@ class FSTestCases(object):
+ f.seek(25)
+ f.write(b("EOF"))
+ with self.fs.open("write_at_end", "rb") as f:
+- self.assertEquals(f.read(), b("\x00")*25 + b("EOF"))
++ self.assertEqual(f.read(), b("\x00")*25 + b("EOF"))
+
+ def test_with_statement(self):
+ # This is a little tricky since 'with' is actually new syntax.
+@@ -856,15 +856,15 @@ class FSTestCases(object):
+ code += " raise ValueError\n"
+ code = compile(code, "<string>", 'exec')
+ self.assertRaises(ValueError, eval, code, globals(), locals())
+- self.assertEquals(self.fs.getcontents('f.txt', 'rb'), contents)
++ self.assertEqual(self.fs.getcontents('f.txt', 'rb'), contents)
+
+ def test_pickling(self):
+ if self.fs.getmeta('pickle_contents', True):
+ self.fs.setcontents("test1", b("hello world"))
+ fs2 = pickle.loads(pickle.dumps(self.fs))
+- self.assert_(fs2.isfile("test1"))
++ self.assertTrue(fs2.isfile("test1"))
+ fs3 = pickle.loads(pickle.dumps(self.fs, -1))
+- self.assert_(fs3.isfile("test1"))
++ self.assertTrue(fs3.isfile("test1"))
+ else:
+ # Just make sure it doesn't throw an exception
+ fs2 = pickle.loads(pickle.dumps(self.fs))
+@@ -879,9 +879,9 @@ class FSTestCases(object):
+ r = random.Random(0)
+ randint = r.randint
+ int2byte = six.int2byte
+- for _i in xrange(num_chunks):
++ for _i in range(num_chunks):
+ c = b("").join(int2byte(randint(
+- 0, 255)) for _j in xrange(chunk_size//8))
++ 0, 255)) for _j in range(chunk_size//8))
+ yield c * 8
+ f = self.fs.open("bigfile", "wb")
+ try:
+@@ -894,7 +894,7 @@ class FSTestCases(object):
+ try:
+ try:
+ while True:
+- if chunks.next() != f.read(chunk_size):
++ if next(chunks) != f.read(chunk_size):
+ assert False, "bigfile was corrupted"
+ except StopIteration:
+ if f.read() != b(""):
+@@ -929,9 +929,9 @@ class FSTestCases(object):
+ """Test read(0) returns empty string"""
+ self.fs.setcontents('foo.txt', b('Hello, World'))
+ with self.fs.open('foo.txt', 'rb') as f:
+- self.assert_(len(f.read(0)) == 0)
++ self.assertTrue(len(f.read(0)) == 0)
+ with self.fs.open('foo.txt', 'rt') as f:
+- self.assert_(len(f.read(0)) == 0)
++ self.assertTrue(len(f.read(0)) == 0)
+
+ # May be disabled - see end of file
+
+@@ -977,7 +977,7 @@ class ThreadingTestCases(object):
+ for t in threads:
+ t.join()
+ for (c, e, t) in errors:
+- raise e, None, t
++ raise e.with_traceback(t)
+ finally:
+ sys.setcheckinterval(check_interval)
+
+@@ -994,12 +994,12 @@ class ThreadingTestCases(object):
+ def thread1():
+ c = b("thread1 was 'ere")
+ setcontents("thread1.txt", c)
+- self.assertEquals(self.fs.getcontents("thread1.txt", 'rb'), c)
++ self.assertEqual(self.fs.getcontents("thread1.txt", 'rb'), c)
+
+ def thread2():
+ c = b("thread2 was 'ere")
+ setcontents("thread2.txt", c)
+- self.assertEquals(self.fs.getcontents("thread2.txt", 'rb'), c)
++ self.assertEqual(self.fs.getcontents("thread2.txt", 'rb'), c)
+ self._runThreads(thread1, thread2)
+
+ def test_setcontents_threaded_samefile(self):
+@@ -1016,19 +1016,19 @@ class ThreadingTestCases(object):
+ c = b("thread1 was 'ere")
+ setcontents("threads.txt", c)
+ self._yield()
+- self.assertEquals(self.fs.listdir("/"), ["threads.txt"])
++ self.assertEqual(self.fs.listdir("/"), ["threads.txt"])
+
+ def thread2():
+ c = b("thread2 was 'ere")
+ setcontents("threads.txt", c)
+ self._yield()
+- self.assertEquals(self.fs.listdir("/"), ["threads.txt"])
++ self.assertEqual(self.fs.listdir("/"), ["threads.txt"])
+
+ def thread3():
+ c = b("thread3 was 'ere")
+ setcontents("threads.txt", c)
+ self._yield()
+- self.assertEquals(self.fs.listdir("/"), ["threads.txt"])
++ self.assertEqual(self.fs.listdir("/"), ["threads.txt"])
+ try:
+ self._runThreads(thread1, thread2, thread3)
+ except ResourceLockedError:
+@@ -1079,23 +1079,23 @@ class ThreadingTestCases(object):
+ def makedir():
+ try:
+ self.fs.makedir("testdir")
+- except DestinationExistsError, e:
++ except DestinationExistsError as e:
+ errors.append(e)
+
+ def makedir_noerror():
+ try:
+ self.fs.makedir("testdir", allow_recreate=True)
+- except DestinationExistsError, e:
++ except DestinationExistsError as e:
+ errors.append(e)
+
+ def removedir():
+ try:
+ self.fs.removedir("testdir")
+- except (ResourceNotFoundError, ResourceLockedError), e:
++ except (ResourceNotFoundError, ResourceLockedError) as e:
+ errors.append(e)
+ # One thread should succeed, one should error
+ self._runThreads(makedir, makedir)
+- self.assertEquals(len(errors), 1)
++ self.assertEqual(len(errors), 1)
+ self.fs.removedir("testdir")
+ # One thread should succeed, two should error
+ errors = []
+@@ -1106,18 +1106,18 @@ class ThreadingTestCases(object):
+ # All threads should succeed
+ errors = []
+ self._runThreads(makedir_noerror, makedir_noerror, makedir_noerror)
+- self.assertEquals(len(errors), 0)
++ self.assertEqual(len(errors), 0)
+ self.assertTrue(self.fs.isdir("testdir"))
+ self.fs.removedir("testdir")
+ # makedir() can beat removedir() and vice-versa
+ errors = []
+ self._runThreads(makedir, removedir)
+ if self.fs.isdir("testdir"):
+- self.assertEquals(len(errors), 1)
++ self.assertEqual(len(errors), 1)
+ self.assertFalse(isinstance(errors[0], DestinationExistsError))
+ self.fs.removedir("testdir")
+ else:
+- self.assertEquals(len(errors), 0)
++ self.assertEqual(len(errors), 0)
+
+ def test_concurrent_copydir(self):
+ self.fs.makedir("a")
+@@ -1136,10 +1136,10 @@ class ThreadingTestCases(object):
+ # This should error out since we're not overwriting
+ self.assertRaises(
+ DestinationExistsError, self._runThreads, copydir, copydir)
+- self.assert_(self.fs.isdir('a'))
+- self.assert_(self.fs.isdir('a'))
++ self.assertTrue(self.fs.isdir('a'))
++ self.assertTrue(self.fs.isdir('a'))
+ copydir_overwrite()
+- self.assert_(self.fs.isdir('a'))
++ self.assertTrue(self.fs.isdir('a'))
+ # This should run to completion and give a valid state, unless
+ # files get locked when written to.
+ try:
+@@ -1160,19 +1160,19 @@ class ThreadingTestCases(object):
+ "contents the second"), b("number three")]
+
+ def thread1():
+- for i in xrange(30):
++ for i in range(30):
+ for c in contents:
+ self.fs.setcontents("thread1.txt", c)
+- self.assertEquals(self.fs.getsize("thread1.txt"), len(c))
+- self.assertEquals(self.fs.getcontents(
++ self.assertEqual(self.fs.getsize("thread1.txt"), len(c))
++ self.assertEqual(self.fs.getcontents(
+ "thread1.txt", 'rb'), c)
+
+ def thread2():
+- for i in xrange(30):
++ for i in range(30):
+ for c in contents:
+ self.fs.setcontents("thread2.txt", c)
+- self.assertEquals(self.fs.getsize("thread2.txt"), len(c))
+- self.assertEquals(self.fs.getcontents(
++ self.assertEqual(self.fs.getsize("thread2.txt"), len(c))
++ self.assertEqual(self.fs.getcontents(
+ "thread2.txt", 'rb'), c)
+ self._runThreads(thread1, thread2)
+
+--- fs/tests/test_archivefs.py.orig 2022-03-04 17:14:43 UTC
++++ fs/tests/test_archivefs.py
+@@ -58,7 +58,7 @@ class TestReadArchiveFS(unittest.TestCase):
+ contents = f.read()
+ return contents
+ def check_contents(path, expected):
+- self.assert_(read_contents(path)==expected)
++ self.assertTrue(read_contents(path)==expected)
+ check_contents("a.txt", b("Hello, World!"))
+ check_contents("1.txt", b("1"))
+ check_contents("foo/bar/baz.txt", b("baz"))
+@@ -67,29 +67,29 @@ class TestReadArchiveFS(unittest.TestCase):
+ def read_contents(path):
+ return self.fs.getcontents(path)
+ def check_contents(path, expected):
+- self.assert_(read_contents(path)==expected)
++ self.assertTrue(read_contents(path)==expected)
+ check_contents("a.txt", b("Hello, World!"))
+ check_contents("1.txt", b("1"))
+ check_contents("foo/bar/baz.txt", b("baz"))
+
+ def test_is(self):
+- self.assert_(self.fs.isfile('a.txt'))
+- self.assert_(self.fs.isfile('1.txt'))
+- self.assert_(self.fs.isfile('foo/bar/baz.txt'))
+- self.assert_(self.fs.isdir('foo'))
+- self.assert_(self.fs.isdir('foo/bar'))
+- self.assert_(self.fs.exists('a.txt'))
+- self.assert_(self.fs.exists('1.txt'))
+- self.assert_(self.fs.exists('foo/bar/baz.txt'))
+- self.assert_(self.fs.exists('foo'))
+- self.assert_(self.fs.exists('foo/bar'))
++ self.assertTrue(self.fs.isfile('a.txt'))
++ self.assertTrue(self.fs.isfile('1.txt'))
++ self.assertTrue(self.fs.isfile('foo/bar/baz.txt'))
++ self.assertTrue(self.fs.isdir('foo'))
++ self.assertTrue(self.fs.isdir('foo/bar'))
++ self.assertTrue(self.fs.exists('a.txt'))
++ self.assertTrue(self.fs.exists('1.txt'))
++ self.assertTrue(self.fs.exists('foo/bar/baz.txt'))
++ self.assertTrue(self.fs.exists('foo'))
++ self.assertTrue(self.fs.exists('foo/bar'))
+
+ def test_listdir(self):
+ def check_listing(path, expected):
+ dir_list = self.fs.listdir(path)
+- self.assert_(sorted(dir_list) == sorted(expected))
++ self.assertTrue(sorted(dir_list) == sorted(expected))
+ for item in dir_list:
+- self.assert_(isinstance(item,unicode))
++ self.assertTrue(isinstance(item,str))
+ check_listing('/', ['a.txt', '1.txt', 'foo', 'b.txt'])
+ check_listing('foo', ['second.txt', 'bar'])
+ check_listing('foo/bar', ['baz.txt'])
+@@ -114,7 +114,7 @@ class TestWriteArchiveFS(unittest.TestCase):
+
+ makefile("a.txt", b("Hello, World!"))
+ makefile("b.txt", b("b"))
+- makefile(u"\N{GREEK SMALL LETTER ALPHA}/\N{GREEK CAPITAL LETTER OMEGA}.txt", b("this is the alpha and the omega"))
++ makefile("\N{GREEK SMALL LETTER ALPHA}/\N{GREEK CAPITAL LETTER OMEGA}.txt", b("this is the alpha and the omega"))
+ makefile("foo/bar/baz.txt", b("baz"))
+ makefile("foo/second.txt", b("hai"))
+
+@@ -125,7 +125,7 @@ class TestWriteArchiveFS(unittest.TestCase):
+
+ def test_valid(self):
+ zf = zipfile.ZipFile(self.temp_filename, "r")
+- self.assert_(zf.testzip() is None)
++ self.assertTrue(zf.testzip() is None)
+ zf.close()
+
+ def test_creation(self):
+@@ -140,7 +140,7 @@ class TestWriteArchiveFS(unittest.TestCase):
+ check_contents("b.txt", b("b"))
+ check_contents("foo/bar/baz.txt", b("baz"))
+ check_contents("foo/second.txt", b("hai"))
+- check_contents(u"\N{GREEK SMALL LETTER ALPHA}/\N{GREEK CAPITAL LETTER OMEGA}.txt", b("this is the alpha and the omega"))
++ check_contents("\N{GREEK SMALL LETTER ALPHA}/\N{GREEK CAPITAL LETTER OMEGA}.txt", b("this is the alpha and the omega"))
+
+
+ #~ class TestAppendArchiveFS(TestWriteArchiveFS):
+--- fs/tests/test_errors.py.orig 2022-03-04 17:14:43 UTC
++++ fs/tests/test_errors.py
+@@ -29,4 +29,4 @@ class TestFSError(unittest.TestCase):
+
+ def test_unicode_representation_of_error_with_non_ascii_characters(self):
+ path_error = PathError('/Shïrê/Frødø')
+- _ = unicode(path_error)
+\ No newline at end of file
++ _ = str(path_error)
+--- fs/tests/test_expose.py.orig 2022-03-04 17:14:43 UTC
++++ fs/tests/test_expose.py
+@@ -149,7 +149,7 @@ if dokan.is_available:
+
+ def tearDown(self):
+ self.mount_proc.unmount()
+- for _ in xrange(10):
++ for _ in range(10):
+ try:
+ if self.mount_proc.poll() is None:
+ self.mount_proc.terminate()
+--- fs/tests/test_fs.py.orig 2022-03-04 17:14:43 UTC
++++ fs/tests/test_fs.py
+@@ -20,7 +20,7 @@ from fs import osfs
+ class TestOSFS(unittest.TestCase,FSTestCases,ThreadingTestCases):
+
+ def setUp(self):
+- self.temp_dir = tempfile.mkdtemp(u"fstest")
++ self.temp_dir = tempfile.mkdtemp("fstest")
+ self.fs = osfs.OSFS(self.temp_dir)
+
+ def tearDown(self):
+@@ -35,14 +35,14 @@ class TestOSFS(unittest.TestCase,FSTestCases,Threading
+
+ self.assertRaises(errors.InvalidCharsInPathError, self.fs.open, 'invalid\0file', 'wb')
+ self.assertFalse(self.fs.isvalidpath('invalid\0file'))
+- self.assert_(self.fs.isvalidpath('validfile'))
+- self.assert_(self.fs.isvalidpath('completely_valid/path/foo.bar'))
++ self.assertTrue(self.fs.isvalidpath('validfile'))
++ self.assertTrue(self.fs.isvalidpath('completely_valid/path/foo.bar'))
+
+
+ class TestSubFS(unittest.TestCase,FSTestCases,ThreadingTestCases):
+
+ def setUp(self):
+- self.temp_dir = tempfile.mkdtemp(u"fstest")
++ self.temp_dir = tempfile.mkdtemp("fstest")
+ self.parent_fs = osfs.OSFS(self.temp_dir)
+ self.parent_fs.makedir("foo/bar", recursive=True)
+ self.fs = self.parent_fs.opendir("foo/bar")
+@@ -118,7 +118,7 @@ class TestTempFS(unittest.TestCase,FSTestCases,Threadi
+ def tearDown(self):
+ td = self.fs._temp_dir
+ self.fs.close()
+- self.assert_(not os.path.exists(td))
++ self.assertTrue(not os.path.exists(td))
+
+ def check(self, p):
+ td = self.fs._temp_dir
+@@ -129,5 +129,5 @@ class TestTempFS(unittest.TestCase,FSTestCases,Threadi
+
+ self.assertRaises(errors.InvalidCharsInPathError, self.fs.open, 'invalid\0file', 'wb')
+ self.assertFalse(self.fs.isvalidpath('invalid\0file'))
+- self.assert_(self.fs.isvalidpath('validfile'))
+- self.assert_(self.fs.isvalidpath('completely_valid/path/foo.bar'))
++ self.assertTrue(self.fs.isvalidpath('validfile'))
++ self.assertTrue(self.fs.isvalidpath('completely_valid/path/foo.bar'))
+--- fs/tests/test_ftpfs.py.orig 2022-03-04 17:14:43 UTC
++++ fs/tests/test_ftpfs.py
+@@ -10,7 +10,7 @@ import tempfile
+ import subprocess
+ import time
+ from os.path import abspath
+-import urllib
++import urllib.request, urllib.parse, urllib.error
+
+ from six import PY3
+
+@@ -37,7 +37,7 @@ class TestFTPFS(unittest.TestCase, FSTestCases, Thread
+ ftp_port += 1
+ use_port = str(ftp_port)
+ #ftp_port = 10000
+- self.temp_dir = tempfile.mkdtemp(u"ftpfstests")
++ self.temp_dir = tempfile.mkdtemp("ftpfstests")
+
+ file_path = __file__
+ if ':' not in file_path:
+@@ -58,7 +58,7 @@ class TestFTPFS(unittest.TestCase, FSTestCases, Thread
+ start_time = time.time()
+ while time.time() - start_time < 5:
+ try:
+- ftpurl = urllib.urlopen('ftp://127.0.0.1:%s' % use_port)
++ ftpurl = urllib.request.urlopen('ftp://127.0.0.1:%s' % use_port)
+ except IOError:
+ time.sleep(0)
+ else:
+--- fs/tests/test_importhook.py.orig 2022-03-04 17:14:43 UTC
++++ fs/tests/test_importhook.py
+@@ -25,7 +25,7 @@ class TestFSImportHook(unittest.TestCase):
+ for ph in list(sys.path_hooks):
+ if issubclass(ph,FSImportHook):
+ sys.path_hooks.remove(mph)
+- for (k,v) in sys.modules.items():
++ for (k,v) in list(sys.modules.items()):
+ if k.startswith("fsih_"):
+ del sys.modules[k]
+ elif hasattr(v,"__loader__"):
+@@ -64,22 +64,22 @@ class TestFSImportHook(unittest.TestCase):
+ ih = FSImportHook(t)
+ sys.meta_path.append(ih)
+ try:
+- self.assertEquals(ih.find_module("fsih_hello"),ih)
+- self.assertEquals(ih.find_module("fsih_helo"),None)
+- self.assertEquals(ih.find_module("fsih_pkg"),ih)
+- self.assertEquals(ih.find_module("fsih_pkg.sub1"),ih)
+- self.assertEquals(ih.find_module("fsih_pkg.sub2"),ih)
+- self.assertEquals(ih.find_module("fsih_pkg.sub3"),None)
++ self.assertEqual(ih.find_module("fsih_hello"),ih)
++ self.assertEqual(ih.find_module("fsih_helo"),None)
++ self.assertEqual(ih.find_module("fsih_pkg"),ih)
++ self.assertEqual(ih.find_module("fsih_pkg.sub1"),ih)
++ self.assertEqual(ih.find_module("fsih_pkg.sub2"),ih)
++ self.assertEqual(ih.find_module("fsih_pkg.sub3"),None)
+ m = ih.load_module("fsih_hello")
+- self.assertEquals(m.message,"hello world!")
++ self.assertEqual(m.message,"hello world!")
+ self.assertRaises(ImportError,ih.load_module,"fsih_helo")
+ ih.load_module("fsih_pkg")
+ m = ih.load_module("fsih_pkg.sub1")
+- self.assertEquals(m.message,"hello world!")
+- self.assertEquals(m.a,42)
++ self.assertEqual(m.message,"hello world!")
++ self.assertEqual(m.a,42)
+ m = ih.load_module("fsih_pkg.sub2")
+- self.assertEquals(m.message,"hello world!")
+- self.assertEquals(m.a,42 * 2)
++ self.assertEqual(m.message,"hello world!")
++ self.assertEqual(m.a,42 * 2)
+ self.assertRaises(ImportError,ih.load_module,"fsih_pkg.sub3")
+ finally:
+ sys.meta_path.remove(ih)
+@@ -88,7 +88,7 @@ class TestFSImportHook(unittest.TestCase):
+ def _check_imports_are_working(self):
+ try:
+ import fsih_hello
+- self.assertEquals(fsih_hello.message,"hello world!")
++ self.assertEqual(fsih_hello.message,"hello world!")
+ try:
+ import fsih_helo
+ except ImportError:
+@@ -97,11 +97,11 @@ class TestFSImportHook(unittest.TestCase):
+ assert False, "ImportError not raised"
+ import fsih_pkg
+ import fsih_pkg.sub1
+- self.assertEquals(fsih_pkg.sub1.message,"hello world!")
+- self.assertEquals(fsih_pkg.sub1.a,42)
++ self.assertEqual(fsih_pkg.sub1.message,"hello world!")
++ self.assertEqual(fsih_pkg.sub1.a,42)
+ import fsih_pkg.sub2
+- self.assertEquals(fsih_pkg.sub2.message,"hello world!")
+- self.assertEquals(fsih_pkg.sub2.a,42 * 2)
++ self.assertEqual(fsih_pkg.sub2.message,"hello world!")
++ self.assertEqual(fsih_pkg.sub2.a,42 * 2)
+ try:
+ import fsih_pkg.sub3
+ except ImportError:
+@@ -109,7 +109,7 @@ class TestFSImportHook(unittest.TestCase):
+ else:
+ assert False, "ImportError not raised"
+ finally:
+- for k in sys.modules.keys():
++ for k in list(sys.modules.keys()):
+ if k.startswith("fsih_"):
+ del sys.modules[k]
+
+--- fs/tests/test_iotools.py.orig 2022-03-04 17:14:43 UTC
++++ fs/tests/test_iotools.py
+@@ -1,5 +1,5 @@
+-from __future__ import unicode_literals
+
++
+ from fs import iotools
+
+ import io
+@@ -7,9 +7,9 @@ import unittest
+ from os.path import dirname, join, abspath
+
+ try:
+- unicode
++ str
+ except NameError:
+- unicode = str
++ str = str
+
+
+ class OpenFilelike(object):
+@@ -37,20 +37,20 @@ class TestIOTools(unittest.TestCase):
+ """Test make_stream"""
+ with self.get_bin_file() as f:
+ text = f.read()
+- self.assert_(isinstance(text, bytes))
++ self.assertTrue(isinstance(text, bytes))
+
+ with self.get_bin_file() as f:
+ with iotools.make_stream("data/UTF-8-demo.txt", f, 'rt') as f2:
+ text = f2.read()
+- self.assert_(isinstance(text, unicode))
++ self.assertTrue(isinstance(text, str))
+
+ def test_decorator(self):
+ """Test filelike_to_stream decorator"""
+ o = OpenFilelike(self.get_bin_file)
+ with o.open('file', 'rb') as f:
+ text = f.read()
+- self.assert_(isinstance(text, bytes))
++ self.assertTrue(isinstance(text, bytes))
+
+ with o.open('file', 'rt') as f:
+ text = f.read()
+- self.assert_(isinstance(text, unicode))
++ self.assertTrue(isinstance(text, str))
+--- fs/tests/test_mountfs.py.orig 2022-03-04 17:14:43 UTC
++++ fs/tests/test_mountfs.py
+@@ -12,11 +12,11 @@ class TestMountFS(unittest.TestCase):
+ m2 = MemoryFS()
+ multi_fs.mount('/m1', m1)
+ multi_fs.mount('/m2', m2)
+- self.assert_(not m1.closed)
+- self.assert_(not m2.closed)
++ self.assertTrue(not m1.closed)
++ self.assertTrue(not m2.closed)
+ multi_fs.close()
+- self.assert_(m1.closed)
+- self.assert_(m2.closed)
++ self.assertTrue(m1.closed)
++ self.assertTrue(m2.closed)
+
+ def test_no_auto_close(self):
+ """Test MountFS auto close can be disabled"""
+@@ -25,11 +25,11 @@ class TestMountFS(unittest.TestCase):
+ m2 = MemoryFS()
+ multi_fs.mount('/m1', m1)
+ multi_fs.mount('/m2', m2)
+- self.assert_(not m1.closed)
+- self.assert_(not m2.closed)
++ self.assertTrue(not m1.closed)
++ self.assertTrue(not m2.closed)
+ multi_fs.close()
+- self.assert_(not m1.closed)
+- self.assert_(not m2.closed)
++ self.assertTrue(not m1.closed)
++ self.assertTrue(not m2.closed)
+
+ def test_mountfile(self):
+ """Test mounting a file"""
+@@ -42,16 +42,16 @@ class TestMountFS(unittest.TestCase):
+ mount_fs = MountFS()
+ mount_fs.mountfile('bar.txt', foo_dir.open, foo_dir.getinfo)
+
+- self.assert_(mount_fs.isdir('/'))
+- self.assert_(mount_fs.isdir('./'))
+- self.assert_(mount_fs.isdir(''))
++ self.assertTrue(mount_fs.isdir('/'))
++ self.assertTrue(mount_fs.isdir('./'))
++ self.assertTrue(mount_fs.isdir(''))
+
+ # Check we can see the mounted file in the dir list
+ self.assertEqual(mount_fs.listdir(), ["bar.txt"])
+- self.assert_(not mount_fs.exists('nobodyhere.txt'))
+- self.assert_(mount_fs.exists('bar.txt'))
+- self.assert_(mount_fs.isfile('bar.txt'))
+- self.assert_(not mount_fs.isdir('bar.txt'))
++ self.assertTrue(not mount_fs.exists('nobodyhere.txt'))
++ self.assertTrue(mount_fs.exists('bar.txt'))
++ self.assertTrue(mount_fs.isfile('bar.txt'))
++ self.assertTrue(not mount_fs.isdir('bar.txt'))
+
+ # Check open and getinfo callables
+ self.assertEqual(mount_fs.getcontents('bar.txt'), quote)
+@@ -67,9 +67,9 @@ class TestMountFS(unittest.TestCase):
+ self.assertEqual(mem_fs.getsize('foo/bar.txt'), len('baz'))
+
+ # Check unmount
+- self.assert_(mount_fs.unmount("bar.txt"))
++ self.assertTrue(mount_fs.unmount("bar.txt"))
+ self.assertEqual(mount_fs.listdir(), [])
+- self.assert_(not mount_fs.exists('bar.txt'))
++ self.assertTrue(not mount_fs.exists('bar.txt'))
+
+ # Check unount a second time is a null op, and returns False
+ self.assertFalse(mount_fs.unmount("bar.txt"))
+--- fs/tests/test_multifs.py.orig 2022-03-04 17:14:43 UTC
++++ fs/tests/test_multifs.py
+@@ -13,11 +13,11 @@ class TestMultiFS(unittest.TestCase):
+ m2 = MemoryFS()
+ multi_fs.addfs('m1', m1)
+ multi_fs.addfs('m2', m2)
+- self.assert_(not m1.closed)
+- self.assert_(not m2.closed)
++ self.assertTrue(not m1.closed)
++ self.assertTrue(not m2.closed)
+ multi_fs.close()
+- self.assert_(m1.closed)
+- self.assert_(m2.closed)
++ self.assertTrue(m1.closed)
++ self.assertTrue(m2.closed)
+
+ def test_no_auto_close(self):
+ """Test MultiFS auto close can be disables"""
+@@ -26,11 +26,11 @@ class TestMultiFS(unittest.TestCase):
+ m2 = MemoryFS()
+ multi_fs.addfs('m1', m1)
+ multi_fs.addfs('m2', m2)
+- self.assert_(not m1.closed)
+- self.assert_(not m2.closed)
++ self.assertTrue(not m1.closed)
++ self.assertTrue(not m2.closed)
+ multi_fs.close()
+- self.assert_(not m1.closed)
+- self.assert_(not m2.closed)
++ self.assertTrue(not m1.closed)
++ self.assertTrue(not m2.closed)
+
+
+ def test_priority(self):
+@@ -45,7 +45,7 @@ class TestMultiFS(unittest.TestCase):
+ multi_fs.addfs("m1", m1)
+ multi_fs.addfs("m2", m2)
+ multi_fs.addfs("m3", m3)
+- self.assert_(multi_fs.getcontents("name") == b("m3"))
++ self.assertTrue(multi_fs.getcontents("name") == b("m3"))
+
+ m1 = MemoryFS()
+ m2 = MemoryFS()
+@@ -57,7 +57,7 @@ class TestMultiFS(unittest.TestCase):
+ multi_fs.addfs("m1", m1)
+ multi_fs.addfs("m2", m2, priority=10)
+ multi_fs.addfs("m3", m3)
+- self.assert_(multi_fs.getcontents("name") == b("m2"))
++ self.assertTrue(multi_fs.getcontents("name") == b("m2"))
+
+ m1 = MemoryFS()
+ m2 = MemoryFS()
+@@ -69,7 +69,7 @@ class TestMultiFS(unittest.TestCase):
+ multi_fs.addfs("m1", m1)
+ multi_fs.addfs("m2", m2, priority=10)
+ multi_fs.addfs("m3", m3, priority=10)
+- self.assert_(multi_fs.getcontents("name") == b("m3"))
++ self.assertTrue(multi_fs.getcontents("name") == b("m3"))
+
+ m1 = MemoryFS()
+ m2 = MemoryFS()
+@@ -81,5 +81,5 @@ class TestMultiFS(unittest.TestCase):
+ multi_fs.addfs("m1", m1, priority=11)
+ multi_fs.addfs("m2", m2, priority=10)
+ multi_fs.addfs("m3", m3, priority=10)
+- self.assert_(multi_fs.getcontents("name") == b("m1"))
++ self.assertTrue(multi_fs.getcontents("name") == b("m1"))
+
+--- fs/tests/test_opener.py.orig 2022-03-04 17:14:43 UTC
++++ fs/tests/test_opener.py
+@@ -14,7 +14,7 @@ from fs import path
+ class TestOpener(unittest.TestCase):
+
+ def setUp(self):
+- self.temp_dir = tempfile.mkdtemp(u"fstest_opener")
++ self.temp_dir = tempfile.mkdtemp("fstest_opener")
+
+ def tearDown(self):
+ shutil.rmtree(self.temp_dir)
+--- fs/tests/test_path.py.orig 2022-03-04 17:14:43 UTC
++++ fs/tests/test_path.py
+@@ -23,7 +23,7 @@ class TestPathFunctions(unittest.TestCase):
+ ("a/b/c", "a/b/c"),
+ ("a/b/../c/", "a/c"),
+ ("/","/"),
+- (u"a/\N{GREEK SMALL LETTER BETA}/c",u"a/\N{GREEK SMALL LETTER BETA}/c"),
++ ("a/\N{GREEK SMALL LETTER BETA}/c","a/\N{GREEK SMALL LETTER BETA}/c"),
+ ]
+ for path, result in tests:
+ self.assertEqual(normpath(path), result)
+@@ -44,7 +44,7 @@ class TestPathFunctions(unittest.TestCase):
+ ("a/b", "./d", "e", "a/b/d/e"),
+ ("/", "/", "/"),
+ ("/", "", "/"),
+- (u"a/\N{GREEK SMALL LETTER BETA}","c",u"a/\N{GREEK SMALL LETTER BETA}/c"),
++ ("a/\N{GREEK SMALL LETTER BETA}","c","a/\N{GREEK SMALL LETTER BETA}/c"),
+ ]
+ for testpaths in tests:
+ paths = testpaths[:-1]
+@@ -101,12 +101,12 @@ class TestPathFunctions(unittest.TestCase):
+ self.assertEqual(pathsplit(path), result)
+
+ def test_recursepath(self):
+- self.assertEquals(recursepath("/"),["/"])
+- self.assertEquals(recursepath("hello"),["/","/hello"])
+- self.assertEquals(recursepath("/hello/world/"),["/","/hello","/hello/world"])
+- self.assertEquals(recursepath("/hello/world/",reverse=True),["/hello/world","/hello","/"])
+- self.assertEquals(recursepath("hello",reverse=True),["/hello","/"])
+- self.assertEquals(recursepath("",reverse=True),["/"])
++ self.assertEqual(recursepath("/"),["/"])
++ self.assertEqual(recursepath("hello"),["/","/hello"])
++ self.assertEqual(recursepath("/hello/world/"),["/","/hello","/hello/world"])
++ self.assertEqual(recursepath("/hello/world/",reverse=True),["/hello/world","/hello","/"])
++ self.assertEqual(recursepath("hello",reverse=True),["/hello","/"])
++ self.assertEqual(recursepath("",reverse=True),["/"])
+
+ def test_isdotfile(self):
+ for path in ['.foo',
+@@ -114,7 +114,7 @@ class TestPathFunctions(unittest.TestCase):
+ 'foo/.svn',
+ 'foo/bar/.svn',
+ '/foo/.bar']:
+- self.assert_(isdotfile(path))
++ self.assertTrue(isdotfile(path))
+
+ for path in ['asfoo',
+ 'df.svn',
+@@ -142,10 +142,10 @@ class TestPathFunctions(unittest.TestCase):
+ self.assertEqual(basename(path), test_basename)
+
+ def test_iswildcard(self):
+- self.assert_(iswildcard('*'))
+- self.assert_(iswildcard('*.jpg'))
+- self.assert_(iswildcard('foo/*'))
+- self.assert_(iswildcard('foo/{}'))
++ self.assertTrue(iswildcard('*'))
++ self.assertTrue(iswildcard('*.jpg'))
++ self.assertTrue(iswildcard('foo/*'))
++ self.assertTrue(iswildcard('foo/{}'))
+ self.assertFalse(iswildcard('foo'))
+ self.assertFalse(iswildcard('img.jpg'))
+ self.assertFalse(iswildcard('foo/bar'))
+@@ -171,9 +171,9 @@ class Test_PathMap(unittest.TestCase):
+ def test_basics(self):
+ map = PathMap()
+ map["hello"] = "world"
+- self.assertEquals(map["/hello"],"world")
+- self.assertEquals(map["/hello/"],"world")
+- self.assertEquals(map.get("hello"),"world")
++ self.assertEqual(map["/hello"],"world")
++ self.assertEqual(map["/hello/"],"world")
++ self.assertEqual(map.get("hello"),"world")
+
+ def test_iteration(self):
+ map = PathMap()
+@@ -183,17 +183,17 @@ class Test_PathMap(unittest.TestCase):
+ map["hello/kitty"] = 4
+ map["hello/kitty/islame"] = 5
+ map["batman/isawesome"] = 6
+- self.assertEquals(set(map.iterkeys()),set(("/hello/world","/hello/world/howareya","/hello/world/iamfine","/hello/kitty","/hello/kitty/islame","/batman/isawesome")))
+- self.assertEquals(sorted(map.values()),range(1,7))
+- self.assertEquals(sorted(map.items("/hello/world/")),[("/hello/world",1),("/hello/world/howareya",2),("/hello/world/iamfine",3)])
+- self.assertEquals(zip(map.keys(),map.values()),map.items())
+- self.assertEquals(zip(map.keys("batman"),map.values("batman")),map.items("batman"))
+- self.assertEquals(set(map.iternames("hello")),set(("world","kitty")))
+- self.assertEquals(set(map.iternames("/hello/kitty")),set(("islame",)))
++ self.assertEqual(set(map.keys()),set(("/hello/world","/hello/world/howareya","/hello/world/iamfine","/hello/kitty","/hello/kitty/islame","/batman/isawesome")))
++ self.assertEqual(sorted(map.values()),list(range(1,7)))
++ self.assertEqual(sorted(map.items("/hello/world/")),[("/hello/world",1),("/hello/world/howareya",2),("/hello/world/iamfine",3)])
++ self.assertEqual(list(zip(list(map.keys()),list(map.values()))),list(map.items()))
++        self.assertEqual(list(zip(map.keys("batman"),map.values("batman"))),list(map.items("batman")))
++ self.assertEqual(set(map.iternames("hello")),set(("world","kitty")))
++ self.assertEqual(set(map.iternames("/hello/kitty")),set(("islame",)))
+
+ del map["hello/kitty/islame"]
+- self.assertEquals(set(map.iternames("/hello/kitty")),set())
+- self.assertEquals(set(map.iterkeys()),set(("/hello/world","/hello/world/howareya","/hello/world/iamfine","/hello/kitty","/batman/isawesome")))
+- self.assertEquals(set(map.values()),set(range(1,7)) - set((5,)))
++ self.assertEqual(set(map.iternames("/hello/kitty")),set())
++ self.assertEqual(set(map.keys()),set(("/hello/world","/hello/world/howareya","/hello/world/iamfine","/hello/kitty","/batman/isawesome")))
++ self.assertEqual(set(map.values()),set(range(1,7)) - set((5,)))
+
+
+--- fs/tests/test_remote.py.orig 2022-03-04 17:14:43 UTC
++++ fs/tests/test_remote.py
+@@ -116,37 +116,37 @@ class TestRemoteFileBuffer(unittest.TestCase, FSTestCa
+ self.fakeOn()
+
+ f = self.fs.open('test.txt', 'rb')
+- self.assertEquals(f.read(10), contents[:10])
++ self.assertEqual(f.read(10), contents[:10])
+ f.wrapped_file.seek(0, SEEK_END)
+- self.assertEquals(f._rfile.tell(), 10)
++ self.assertEqual(f._rfile.tell(), 10)
+ f.seek(20)
+- self.assertEquals(f.tell(), 20)
+- self.assertEquals(f._rfile.tell(), 20)
++ self.assertEqual(f.tell(), 20)
++ self.assertEqual(f._rfile.tell(), 20)
+ f.seek(0, SEEK_END)
+- self.assertEquals(f._rfile.tell(), len(contents))
++ self.assertEqual(f._rfile.tell(), len(contents))
+ f.close()
+
+ f = self.fs.open('test.txt', 'ab')
+- self.assertEquals(f.tell(), len(contents))
++ self.assertEqual(f.tell(), len(contents))
+ f.close()
+
+ self.fakeOff()
+
+ # Writing over the rfile edge
+ f = self.fs.open('test.txt', 'wb+')
+- self.assertEquals(f.tell(), 0)
++ self.assertEqual(f.tell(), 0)
+ f.seek(len(contents) - 5)
+ # Last 5 characters not loaded from remote file
+- self.assertEquals(f._rfile.tell(), len(contents) - 5)
++ self.assertEqual(f._rfile.tell(), len(contents) - 5)
+ # Confirm that last 5 characters are still in rfile buffer
+- self.assertEquals(f._rfile.read(), contents[-5:])
++ self.assertEqual(f._rfile.read(), contents[-5:])
+ # Rollback position 5 characters before eof
+ f._rfile.seek(len(contents[:-5]))
+ # Write 10 new characters (will make contents longer for 5 chars)
+ f.write(b('1234567890'))
+ f.flush()
+ # We are on the end of file (and buffer not serve anything anymore)
+- self.assertEquals(f.read(), b(''))
++ self.assertEqual(f.read(), b(''))
+ f.close()
+
+ self.fakeOn()
+@@ -154,7 +154,7 @@ class TestRemoteFileBuffer(unittest.TestCase, FSTestCa
+ # Check if we wrote everything OK from
+ # previous writing over the remote buffer edge
+ f = self.fs.open('test.txt', 'rb')
+- self.assertEquals(f.read(), contents[:-5] + b('1234567890'))
++ self.assertEqual(f.read(), contents[:-5] + b('1234567890'))
+ f.close()
+
+ self.fakeOff()
+@@ -199,36 +199,36 @@ class TestRemoteFileBuffer(unittest.TestCase, FSTestCa
+
+ f = self.fs.open('test.txt', 'rb+')
+ # Check if we read just 10 characters
+- self.assertEquals(f.read(10), contents[:10])
+- self.assertEquals(f._rfile.tell(), 10)
++ self.assertEqual(f.read(10), contents[:10])
++ self.assertEqual(f._rfile.tell(), 10)
+ # Write garbage to file to mark it as _changed
+ f.write(b('x'))
+ # This should read the rest of file and store file back to again.
+ f.flush()
+ f.seek(0)
+ # Try if we have unocrrupted file locally...
+- self.assertEquals(f.read(), contents[:10] + b('x') + contents[11:])
++ self.assertEqual(f.read(), contents[:10] + b('x') + contents[11:])
+ f.close()
+
+ # And if we have uncorrupted file also on storage
+ f = self.fs.open('test.txt', 'rb')
+- self.assertEquals(f.read(), contents[:10] + b('x') + contents[11:])
++ self.assertEqual(f.read(), contents[:10] + b('x') + contents[11:])
+ f.close()
+
+ # Now try it again, but write garbage behind edge of remote file
+ f = self.fs.open('test.txt', 'rb+')
+- self.assertEquals(f.read(10), contents[:10])
++ self.assertEqual(f.read(10), contents[:10])
+ # Write garbage to file to mark it as _changed
+ f.write(contents2)
+ f.flush()
+ f.seek(0)
+ # Try if we have unocrrupted file locally...
+- self.assertEquals(f.read(), contents[:10] + contents2)
++ self.assertEqual(f.read(), contents[:10] + contents2)
+ f.close()
+
+ # And if we have uncorrupted file also on storage
+ f = self.fs.open('test.txt', 'rb')
+- self.assertEquals(f.read(), contents[:10] + contents2)
++ self.assertEqual(f.read(), contents[:10] + contents2)
+ f.close()
+
+
+--- fs/tests/test_rpcfs.py.orig 2015-04-12 17:24:29 UTC
++++ fs/tests/test_rpcfs.py
+@@ -48,7 +48,7 @@ class TestRPCFS(unittest.TestCase, FSTestCases, Thread
+ while not self.server:
+ try:
+ self.server = self.makeServer(self.temp_fs,("127.0.0.1",port))
+- except socket.error, e:
++ except socket.error as e:
+ if e.args[1] == "Address already in use":
+ port += 1
+ else:
+@@ -63,7 +63,7 @@ class TestRPCFS(unittest.TestCase, FSTestCases, Thread
+ #self.server.serve_forever()
+ while self.serve_more_requests:
+ self.server.handle_request()
+- except Exception, e:
++ except Exception as e:
+ pass
+
+ self.end_event.set()
+@@ -93,7 +93,7 @@ class TestRPCFS(unittest.TestCase, FSTestCases, Thread
+ sock.settimeout(.1)
+ sock.connect(sa)
+ sock.send(b("\n"))
+- except socket.error, e:
++ except socket.error as e:
+ pass
+ finally:
+ if sock is not None:
+--- fs/tests/test_utils.py.orig 2022-03-04 17:14:43 UTC
++++ fs/tests/test_utils.py
+@@ -16,11 +16,11 @@ class TestUtils(unittest.TestCase):
+ fs.setcontents("foo/bar/fruit", b("apple"))
+
+ def _check_fs(self, fs):
+- self.assert_(fs.isfile("f1"))
+- self.assert_(fs.isfile("f2"))
+- self.assert_(fs.isfile("f3"))
+- self.assert_(fs.isdir("foo/bar"))
+- self.assert_(fs.isfile("foo/bar/fruit"))
++ self.assertTrue(fs.isfile("f1"))
++ self.assertTrue(fs.isfile("f2"))
++ self.assertTrue(fs.isfile("f3"))
++ self.assertTrue(fs.isdir("foo/bar"))
++ self.assertTrue(fs.isfile("foo/bar/fruit"))
+ self.assertEqual(fs.getcontents("f1", "rb"), b("file 1"))
+ self.assertEqual(fs.getcontents("f2", "rb"), b("file 2"))
+ self.assertEqual(fs.getcontents("f3", "rb"), b("file 3"))
+@@ -61,7 +61,7 @@ class TestUtils(unittest.TestCase):
+ fs1sub = fs1.makeopendir("from")
+ self._make_fs(fs1sub)
+ utils.movedir((fs1, "from"), (fs2, "copy"))
+- self.assert_(not fs1.exists("from"))
++ self.assertTrue(not fs1.exists("from"))
+ self._check_fs(fs2.opendir("copy"))
+
+ fs1 = TempFS()
+@@ -69,7 +69,7 @@ class TestUtils(unittest.TestCase):
+ fs1sub = fs1.makeopendir("from")
+ self._make_fs(fs1sub)
+ utils.movedir((fs1, "from"), (fs2, "copy"))
+- self.assert_(not fs1.exists("from"))
++ self.assertTrue(not fs1.exists("from"))
+ self._check_fs(fs2.opendir("copy"))
+
+ def test_movedir_root(self):
+@@ -79,7 +79,7 @@ class TestUtils(unittest.TestCase):
+ fs1sub = fs1.makeopendir("from")
+ self._make_fs(fs1sub)
+ utils.movedir((fs1, "from"), fs2)
+- self.assert_(not fs1.exists("from"))
++ self.assertTrue(not fs1.exists("from"))
+ self._check_fs(fs2)
+
+ fs1 = TempFS()
+@@ -87,7 +87,7 @@ class TestUtils(unittest.TestCase):
+ fs1sub = fs1.makeopendir("from")
+ self._make_fs(fs1sub)
+ utils.movedir((fs1, "from"), fs2)
+- self.assert_(not fs1.exists("from"))
++ self.assertTrue(not fs1.exists("from"))
+ self._check_fs(fs2)
+
+ def test_remove_all(self):
+@@ -101,15 +101,15 @@ class TestUtils(unittest.TestCase):
+ fs.setcontents("foo/baz", b("baz"))
+
+ utils.remove_all(fs, "foo/bar")
+- self.assert_(not fs.exists("foo/bar/fruit"))
+- self.assert_(fs.exists("foo/bar"))
+- self.assert_(fs.exists("foo/baz"))
++ self.assertTrue(not fs.exists("foo/bar/fruit"))
++ self.assertTrue(fs.exists("foo/bar"))
++ self.assertTrue(fs.exists("foo/baz"))
+ utils.remove_all(fs, "")
+- self.assert_(not fs.exists("foo/bar/fruit"))
+- self.assert_(not fs.exists("foo/bar/baz"))
+- self.assert_(not fs.exists("foo/baz"))
+- self.assert_(not fs.exists("foo"))
+- self.assert_(not fs.exists("f1"))
+- self.assert_(fs.isdirempty('/'))
++ self.assertTrue(not fs.exists("foo/bar/fruit"))
++ self.assertTrue(not fs.exists("foo/bar/baz"))
++ self.assertTrue(not fs.exists("foo/baz"))
++ self.assertTrue(not fs.exists("foo"))
++ self.assertTrue(not fs.exists("f1"))
++ self.assertTrue(fs.isdirempty('/'))
+
+
+--- fs/tests/test_watch.py.orig 2022-03-04 17:14:43 UTC
++++ fs/tests/test_watch.py
+@@ -71,7 +71,7 @@ class WatcherTestCases:
+ for event in event_list:
+ if isinstance(event,cls):
+ if path is None or event.path == path:
+- for (k,v) in attrs.iteritems():
++ for (k,v) in attrs.items():
+ if getattr(event,k) != v:
+ break
+ else:
+@@ -98,7 +98,7 @@ class WatcherTestCases:
+ self.assertEventOccurred(CREATED,"/hello")
+ self.clearCapturedEvents()
+ old_atime = self.fs.getinfo("hello").get("accessed_time")
+- self.assertEquals(self.fs.getcontents("hello"), b("hello world"))
++ self.assertEqual(self.fs.getcontents("hello"), b("hello world"))
+ if not isinstance(self.watchfs,PollingWatchableFS):
+ # Help it along by updting the atime.
+ # TODO: why is this necessary?
+@@ -113,7 +113,7 @@ class WatcherTestCases:
+ # update it if it's too old, or don't update it at all!
+ # Try to force the issue, wait for it to change, but eventually
+ # give up and bail out.
+- for i in xrange(10):
++ for i in range(10):
+ if self.fs.getinfo("hello").get("accessed_time") != old_atime:
+ if not self.checkEventOccurred(MODIFIED,"/hello"):
+ self.assertEventOccurred(ACCESSED,"/hello")
+@@ -142,7 +142,7 @@ class WatcherTestCases:
+ self.waitForEvents()
+ for evt in events:
+ assert isinstance(evt,MODIFIED)
+- self.assertEquals(evt.path,"/hello")
++ self.assertEqual(evt.path,"/hello")
+
+ def test_watch_single_file_remove(self):
+ self.fs.makedir("testing")
+@@ -153,9 +153,9 @@ class WatcherTestCases:
+ self.waitForEvents()
+ self.fs.remove("testing/hello")
+ self.waitForEvents()
+- self.assertEquals(len(events),1)
++ self.assertEqual(len(events),1)
+ assert isinstance(events[0],REMOVED)
+- self.assertEquals(events[0].path,"/testing/hello")
++ self.assertEqual(events[0].path,"/testing/hello")
+
+ def test_watch_iter_changes(self):
+ changes = iter_changes(self.watchfs)
+@@ -195,9 +195,9 @@ class TestWatchers_TempFS(unittest.TestCase,FSTestCase
+ watchfs = osfs.OSFS(self.fs.root_path)
+ self.watchfs = ensure_watchable(watchfs,poll_interval=0.1)
+ if watch_inotify is not None:
+- self.assertEquals(watchfs,self.watchfs)
++ self.assertEqual(watchfs,self.watchfs)
+ if watch_win32 is not None:
+- self.assertEquals(watchfs,self.watchfs)
++ self.assertEqual(watchfs,self.watchfs)
+
+ def tearDown(self):
+ self.watchfs.close()
+--- fs/tests/test_wrapfs.py.orig 2022-03-04 17:14:43 UTC
++++ fs/tests/test_wrapfs.py
+@@ -26,7 +26,7 @@ class TestWrapFS(unittest.TestCase, FSTestCases, Threa
+ #__test__ = False
+
+ def setUp(self):
+- self.temp_dir = tempfile.mkdtemp(u"fstest")
++ self.temp_dir = tempfile.mkdtemp("fstest")
+ self.fs = wrapfs.WrapFS(osfs.OSFS(self.temp_dir))
+
+ def tearDown(self):
+@@ -41,7 +41,7 @@ from fs.wrapfs.lazyfs import LazyFS
+ class TestLazyFS(unittest.TestCase, FSTestCases, ThreadingTestCases):
+
+ def setUp(self):
+- self.temp_dir = tempfile.mkdtemp(u"fstest")
++ self.temp_dir = tempfile.mkdtemp("fstest")
+ self.fs = LazyFS((osfs.OSFS,(self.temp_dir,)))
+
+ def tearDown(self):
+@@ -63,13 +63,13 @@ class TestLimitSizeFS(TestWrapFS):
+
+ def tearDown(self):
+ remove_all(self.fs, "/")
+- self.assertEquals(self.fs.cur_size,0)
++ self.assertEqual(self.fs.cur_size,0)
+ super(TestLimitSizeFS,self).tearDown()
+ self.fs.close()
+
+ def test_storage_error(self):
+ total_written = 0
+- for i in xrange(1024*2):
++ for i in range(1024*2):
+ try:
+ total_written += 1030
+ self.fs.setcontents("file %i" % i, b("C")*1030)
+@@ -85,11 +85,11 @@ from fs.wrapfs.hidedotfilesfs import HideDotFilesFS
+ class TestHideDotFilesFS(unittest.TestCase):
+
+ def setUp(self):
+- self.temp_dir = tempfile.mkdtemp(u"fstest")
+- open(os.path.join(self.temp_dir, u".dotfile"), 'w').close()
+- open(os.path.join(self.temp_dir, u"regularfile"), 'w').close()
+- os.mkdir(os.path.join(self.temp_dir, u".dotdir"))
+- os.mkdir(os.path.join(self.temp_dir, u"regulardir"))
++ self.temp_dir = tempfile.mkdtemp("fstest")
++ open(os.path.join(self.temp_dir, ".dotfile"), 'w').close()
++ open(os.path.join(self.temp_dir, "regularfile"), 'w').close()
++ os.mkdir(os.path.join(self.temp_dir, ".dotdir"))
++ os.mkdir(os.path.join(self.temp_dir, "regulardir"))
+ self.fs = HideDotFilesFS(osfs.OSFS(self.temp_dir))
+
+ def tearDown(self):
+@@ -97,15 +97,15 @@ class TestHideDotFilesFS(unittest.TestCase):
+ self.fs.close()
+
+ def test_hidden(self):
+- self.assertEquals(len(self.fs.listdir(hidden=False)), 2)
+- self.assertEquals(len(list(self.fs.ilistdir(hidden=False))), 2)
++ self.assertEqual(len(self.fs.listdir(hidden=False)), 2)
++ self.assertEqual(len(list(self.fs.ilistdir(hidden=False))), 2)
+
+ def test_nonhidden(self):
+- self.assertEquals(len(self.fs.listdir(hidden=True)), 4)
+- self.assertEquals(len(list(self.fs.ilistdir(hidden=True))), 4)
++ self.assertEqual(len(self.fs.listdir(hidden=True)), 4)
++ self.assertEqual(len(list(self.fs.ilistdir(hidden=True))), 4)
+
+ def test_default(self):
+- self.assertEquals(len(self.fs.listdir()), 2)
+- self.assertEquals(len(list(self.fs.ilistdir())), 2)
++ self.assertEqual(len(self.fs.listdir()), 2)
++ self.assertEqual(len(list(self.fs.ilistdir())), 2)
+
+
+--- fs/tests/test_xattr.py.orig 2022-03-04 17:14:43 UTC
++++ fs/tests/test_xattr.py
+@@ -37,19 +37,19 @@ class XAttrTestCases:
+
+ def test_list_xattrs(self):
+ def do_list(p):
+- self.assertEquals(sorted(self.fs.listxattrs(p)),[])
++ self.assertEqual(sorted(self.fs.listxattrs(p)),[])
+ self.fs.setxattr(p,"xattr1","value1")
+- self.assertEquals(self.fs.getxattr(p,"xattr1"),"value1")
+- self.assertEquals(sorted(self.fs.listxattrs(p)),["xattr1"])
+- self.assertTrue(isinstance(self.fs.listxattrs(p)[0],unicode))
++ self.assertEqual(self.fs.getxattr(p,"xattr1"),"value1")
++ self.assertEqual(sorted(self.fs.listxattrs(p)),["xattr1"])
++ self.assertTrue(isinstance(self.fs.listxattrs(p)[0],str))
+ self.fs.setxattr(p,"attr2","value2")
+- self.assertEquals(sorted(self.fs.listxattrs(p)),["attr2","xattr1"])
+- self.assertTrue(isinstance(self.fs.listxattrs(p)[0],unicode))
+- self.assertTrue(isinstance(self.fs.listxattrs(p)[1],unicode))
++ self.assertEqual(sorted(self.fs.listxattrs(p)),["attr2","xattr1"])
++ self.assertTrue(isinstance(self.fs.listxattrs(p)[0],str))
++ self.assertTrue(isinstance(self.fs.listxattrs(p)[1],str))
+ self.fs.delxattr(p,"xattr1")
+- self.assertEquals(sorted(self.fs.listxattrs(p)),["attr2"])
++ self.assertEqual(sorted(self.fs.listxattrs(p)),["attr2"])
+ self.fs.delxattr(p,"attr2")
+- self.assertEquals(sorted(self.fs.listxattrs(p)),[])
++ self.assertEqual(sorted(self.fs.listxattrs(p)),[])
+ self.fs.setcontents("test.txt",b("hello"))
+ do_list("test.txt")
+ self.fs.makedir("mystuff")
+@@ -64,16 +64,16 @@ class XAttrTestCases:
+ self.fs.makedir("stuff")
+ self.fs.copy("a.txt","stuff/a.txt")
+ self.assertTrue(self.fs.exists("stuff/a.txt"))
+- self.assertEquals(self.fs.getxattr("stuff/a.txt","myattr"),"myvalue")
+- self.assertEquals(self.fs.getxattr("stuff/a.txt","testattr"),"testvalue")
+- self.assertEquals(self.fs.getxattr("a.txt","myattr"),"myvalue")
+- self.assertEquals(self.fs.getxattr("a.txt","testattr"),"testvalue")
++ self.assertEqual(self.fs.getxattr("stuff/a.txt","myattr"),"myvalue")
++ self.assertEqual(self.fs.getxattr("stuff/a.txt","testattr"),"testvalue")
++ self.assertEqual(self.fs.getxattr("a.txt","myattr"),"myvalue")
++ self.assertEqual(self.fs.getxattr("a.txt","testattr"),"testvalue")
+ self.fs.setxattr("stuff","dirattr","a directory")
+ self.fs.copydir("stuff","stuff2")
+- self.assertEquals(self.fs.getxattr("stuff2/a.txt","myattr"),"myvalue")
+- self.assertEquals(self.fs.getxattr("stuff2/a.txt","testattr"),"testvalue")
+- self.assertEquals(self.fs.getxattr("stuff2","dirattr"),"a directory")
+- self.assertEquals(self.fs.getxattr("stuff","dirattr"),"a directory")
++ self.assertEqual(self.fs.getxattr("stuff2/a.txt","myattr"),"myvalue")
++ self.assertEqual(self.fs.getxattr("stuff2/a.txt","testattr"),"testvalue")
++ self.assertEqual(self.fs.getxattr("stuff2","dirattr"),"a directory")
++ self.assertEqual(self.fs.getxattr("stuff","dirattr"),"a directory")
+
+ def test_move_xattrs(self):
+ self.fs.setcontents("a.txt",b("content"))
+@@ -82,29 +82,29 @@ class XAttrTestCases:
+ self.fs.makedir("stuff")
+ self.fs.move("a.txt","stuff/a.txt")
+ self.assertTrue(self.fs.exists("stuff/a.txt"))
+- self.assertEquals(self.fs.getxattr("stuff/a.txt","myattr"),"myvalue")
+- self.assertEquals(self.fs.getxattr("stuff/a.txt","testattr"),"testvalue")
++ self.assertEqual(self.fs.getxattr("stuff/a.txt","myattr"),"myvalue")
++ self.assertEqual(self.fs.getxattr("stuff/a.txt","testattr"),"testvalue")
+ self.fs.setxattr("stuff","dirattr","a directory")
+ self.fs.movedir("stuff","stuff2")
+- self.assertEquals(self.fs.getxattr("stuff2/a.txt","myattr"),"myvalue")
+- self.assertEquals(self.fs.getxattr("stuff2/a.txt","testattr"),"testvalue")
+- self.assertEquals(self.fs.getxattr("stuff2","dirattr"),"a directory")
++ self.assertEqual(self.fs.getxattr("stuff2/a.txt","myattr"),"myvalue")
++ self.assertEqual(self.fs.getxattr("stuff2/a.txt","testattr"),"testvalue")
++ self.assertEqual(self.fs.getxattr("stuff2","dirattr"),"a directory")
+
+ def test_remove_file(self):
+ def listxattrs(path):
+ return list(self.fs.listxattrs(path))
+ # Check that xattrs aren't preserved after a file is removed
+ self.fs.createfile("myfile")
+- self.assertEquals(listxattrs("myfile"),[])
++ self.assertEqual(listxattrs("myfile"),[])
+ self.fs.setxattr("myfile","testattr","testvalue")
+- self.assertEquals(listxattrs("myfile"),["testattr"])
++ self.assertEqual(listxattrs("myfile"),["testattr"])
+ self.fs.remove("myfile")
+ self.assertRaises(ResourceNotFoundError,listxattrs,"myfile")
+ self.fs.createfile("myfile")
+- self.assertEquals(listxattrs("myfile"),[])
++ self.assertEqual(listxattrs("myfile"),[])
+ self.fs.setxattr("myfile","testattr2","testvalue2")
+- self.assertEquals(listxattrs("myfile"),["testattr2"])
+- self.assertEquals(self.fs.getxattr("myfile","testattr2"),"testvalue2")
++ self.assertEqual(listxattrs("myfile"),["testattr2"])
++ self.assertEqual(self.fs.getxattr("myfile","testattr2"),"testvalue2")
+ # Check that removing a file without xattrs still works
+ self.fs.createfile("myfile2")
+ self.fs.remove("myfile2")
+@@ -114,16 +114,16 @@ class XAttrTestCases:
+ return list(self.fs.listxattrs(path))
+ # Check that xattrs aren't preserved after a dir is removed
+ self.fs.makedir("mydir")
+- self.assertEquals(listxattrs("mydir"),[])
++ self.assertEqual(listxattrs("mydir"),[])
+ self.fs.setxattr("mydir","testattr","testvalue")
+- self.assertEquals(listxattrs("mydir"),["testattr"])
++ self.assertEqual(listxattrs("mydir"),["testattr"])
+ self.fs.removedir("mydir")
+ self.assertRaises(ResourceNotFoundError,listxattrs,"mydir")
+ self.fs.makedir("mydir")
+- self.assertEquals(listxattrs("mydir"),[])
++ self.assertEqual(listxattrs("mydir"),[])
+ self.fs.setxattr("mydir","testattr2","testvalue2")
+- self.assertEquals(listxattrs("mydir"),["testattr2"])
+- self.assertEquals(self.fs.getxattr("mydir","testattr2"),"testvalue2")
++ self.assertEqual(listxattrs("mydir"),["testattr2"])
++ self.assertEqual(self.fs.getxattr("mydir","testattr2"),"testvalue2")
+ # Check that removing a dir without xattrs still works
+ self.fs.makedir("mydir2")
+ self.fs.removedir("mydir2")
+@@ -149,7 +149,7 @@ class TestXAttr_TempFS(unittest.TestCase,FSTestCases,X
+ except AttributeError:
+ td = self.fs.wrapped_fs._temp_dir
+ self.fs.close()
+- self.assert_(not os.path.exists(td))
++ self.assertTrue(not os.path.exists(td))
+
+ def check(self, p):
+ try:
+--- fs/tests/test_zipfs.py.orig 2022-03-04 17:14:43 UTC
++++ fs/tests/test_zipfs.py
+@@ -52,7 +52,7 @@ class TestReadZipFS(unittest.TestCase):
+ return contents
+
+ def check_contents(path, expected):
+- self.assert_(read_contents(path) == expected)
++ self.assertTrue(read_contents(path) == expected)
+ check_contents("a.txt", b("Hello, World!"))
+ check_contents("1.txt", b("1"))
+ check_contents("foo/bar/baz.txt", b("baz"))
+@@ -62,30 +62,30 @@ class TestReadZipFS(unittest.TestCase):
+ return self.fs.getcontents(path, 'rb')
+
+ def check_contents(path, expected):
+- self.assert_(read_contents(path) == expected)
++ self.assertTrue(read_contents(path) == expected)
+ check_contents("a.txt", b("Hello, World!"))
+ check_contents("1.txt", b("1"))
+ check_contents("foo/bar/baz.txt", b("baz"))
+
+ def test_is(self):
+- self.assert_(self.fs.isfile('a.txt'))
+- self.assert_(self.fs.isfile('1.txt'))
+- self.assert_(self.fs.isfile('foo/bar/baz.txt'))
+- self.assert_(self.fs.isdir('foo'))
+- self.assert_(self.fs.isdir('foo/bar'))
+- self.assert_(self.fs.exists('a.txt'))
+- self.assert_(self.fs.exists('1.txt'))
+- self.assert_(self.fs.exists('foo/bar/baz.txt'))
+- self.assert_(self.fs.exists('foo'))
+- self.assert_(self.fs.exists('foo/bar'))
++ self.assertTrue(self.fs.isfile('a.txt'))
++ self.assertTrue(self.fs.isfile('1.txt'))
++ self.assertTrue(self.fs.isfile('foo/bar/baz.txt'))
++ self.assertTrue(self.fs.isdir('foo'))
++ self.assertTrue(self.fs.isdir('foo/bar'))
++ self.assertTrue(self.fs.exists('a.txt'))
++ self.assertTrue(self.fs.exists('1.txt'))
++ self.assertTrue(self.fs.exists('foo/bar/baz.txt'))
++ self.assertTrue(self.fs.exists('foo'))
++ self.assertTrue(self.fs.exists('foo/bar'))
+
+ def test_listdir(self):
+
+ def check_listing(path, expected):
+ dir_list = self.fs.listdir(path)
+- self.assert_(sorted(dir_list) == sorted(expected))
++ self.assertTrue(sorted(dir_list) == sorted(expected))
+ for item in dir_list:
+- self.assert_(isinstance(item, unicode))
++ self.assertTrue(isinstance(item, str))
+ check_listing('/', ['a.txt', '1.txt', 'foo', 'b.txt'])
+ check_listing('foo', ['second.txt', 'bar'])
+ check_listing('foo/bar', ['baz.txt'])
+@@ -108,7 +108,7 @@ class TestWriteZipFS(unittest.TestCase):
+
+ makefile("a.txt", b("Hello, World!"))
+ makefile("b.txt", b("b"))
+- makefile(u"\N{GREEK SMALL LETTER ALPHA}/\N{GREEK CAPITAL LETTER OMEGA}.txt", b("this is the alpha and the omega"))
++ makefile("\N{GREEK SMALL LETTER ALPHA}/\N{GREEK CAPITAL LETTER OMEGA}.txt", b("this is the alpha and the omega"))
+ makefile("foo/bar/baz.txt", b("baz"))
+ makefile("foo/second.txt", b("hai"))
+
+@@ -119,7 +119,7 @@ class TestWriteZipFS(unittest.TestCase):
+
+ def test_valid(self):
+ zf = zipfile.ZipFile(self.temp_filename, "r")
+- self.assert_(zf.testzip() is None)
++ self.assertTrue(zf.testzip() is None)
+ zf.close()
+
+ def test_creation(self):
+@@ -134,7 +134,7 @@ class TestWriteZipFS(unittest.TestCase):
+ check_contents("b.txt", b("b"))
+ check_contents("foo/bar/baz.txt", b("baz"))
+ check_contents("foo/second.txt", b("hai"))
+- check_contents(u"\N{GREEK SMALL LETTER ALPHA}/\N{GREEK CAPITAL LETTER OMEGA}.txt", b("this is the alpha and the omega"))
++ check_contents("\N{GREEK SMALL LETTER ALPHA}/\N{GREEK CAPITAL LETTER OMEGA}.txt", b("this is the alpha and the omega"))
+
+
+ class TestAppendZipFS(TestWriteZipFS):
+@@ -159,7 +159,7 @@ class TestAppendZipFS(TestWriteZipFS):
+ zip_fs = zipfs.ZipFS(self.temp_filename, 'a')
+
+ makefile("foo/bar/baz.txt", b("baz"))
+- makefile(u"\N{GREEK SMALL LETTER ALPHA}/\N{GREEK CAPITAL LETTER OMEGA}.txt", b("this is the alpha and the omega"))
++ makefile("\N{GREEK SMALL LETTER ALPHA}/\N{GREEK CAPITAL LETTER OMEGA}.txt", b("this is the alpha and the omega"))
+ makefile("foo/second.txt", b("hai"))
+
+ zip_fs.close()
+--- fs/utils.py.orig 2015-11-13 23:12:33 UTC
++++ fs/utils.py
+@@ -384,7 +384,7 @@ def isfile(fs,path,info=None):
+ def contains_files(fs, path='/'):
+ """Check if there are any files in the filesystem"""
+ try:
+- iter(fs.walkfiles(path)).next()
++ next(iter(fs.walkfiles(path)))
+ except StopIteration:
+ return False
+ return True
+@@ -426,7 +426,7 @@ def find_duplicates(fs,
+ for path in compare_paths:
+ file_sizes[fs.getsize(path)].append(path)
+
+- size_duplicates = [paths for paths in file_sizes.itervalues() if len(paths) > 1]
++ size_duplicates = [paths for paths in file_sizes.values() if len(paths) > 1]
+
+ signatures = defaultdict(list)
+
+@@ -453,7 +453,7 @@ def find_duplicates(fs,
+ # If 'quick' is True then the signature comparison is adequate (although
+ # it may result in false positives)
+ if quick:
+- for paths in signatures.itervalues():
++ for paths in signatures.values():
+ if len(paths) > 1:
+ yield paths
+ return
+@@ -482,7 +482,7 @@ def find_duplicates(fs,
+ # byte by byte.
+ # All path groups in this loop have the same size and same signature, so are
+ # highly likely to be identical.
+- for paths in signatures.itervalues():
++ for paths in signatures.values():
+
+ while len(paths) > 1:
+
+@@ -535,7 +535,7 @@ def print_fs(fs,
+ if file_out is None:
+ file_out = sys.stdout
+
+- file_encoding = getattr(file_out, 'encoding', u'utf-8') or u'utf-8'
++ file_encoding = getattr(file_out, 'encoding', 'utf-8') or 'utf-8'
+ file_encoding = file_encoding.upper()
+
+ if terminal_colors is None:
+@@ -546,44 +546,44 @@ def print_fs(fs,
+
+ def write(line):
+ if PY3:
+- file_out.write((line + u'\n'))
++ file_out.write((line + '\n'))
+ else:
+- file_out.write((line + u'\n').encode(file_encoding, 'replace'))
++ file_out.write((line + '\n').encode(file_encoding, 'replace'))
+
+ def wrap_prefix(prefix):
+ if not terminal_colors:
+ return prefix
+- return u'\x1b[32m%s\x1b[0m' % prefix
++ return '\x1b[32m%s\x1b[0m' % prefix
+
+ def wrap_dirname(dirname):
+ if not terminal_colors:
+ return dirname
+- return u'\x1b[1;34m%s\x1b[0m' % dirname
++ return '\x1b[1;34m%s\x1b[0m' % dirname
+
+ def wrap_error(msg):
+ if not terminal_colors:
+ return msg
+- return u'\x1b[31m%s\x1b[0m' % msg
++ return '\x1b[31m%s\x1b[0m' % msg
+
+ def wrap_filename(fname):
+ if not terminal_colors:
+ return fname
+- if fname.startswith(u'.'):
+- fname = u'\x1b[33m%s\x1b[0m' % fname
++ if fname.startswith('.'):
++ fname = '\x1b[33m%s\x1b[0m' % fname
+ return fname
+ dircount = [0]
+ filecount = [0]
+ def print_dir(fs, path, levels=[]):
+ if file_encoding == 'UTF-8' and terminal_colors:
+- char_vertline = u'│'
+- char_newnode = u'├'
+- char_line = u'──'
+- char_corner = u'╰'
++ char_vertline = '│'
++ char_newnode = '├'
++ char_line = '──'
++ char_corner = '╰'
+ else:
+- char_vertline = u'|'
+- char_newnode = u'|'
+- char_line = u'--'
+- char_corner = u'`'
++ char_vertline = '|'
++ char_newnode = '|'
++ char_line = '--'
++ char_corner = '`'
+
+ try:
+ dirs = fs.listdir(path, dirs_only=True)
+@@ -593,18 +593,18 @@ def print_fs(fs,
+ files = fs.listdir(path, files_only=True, wildcard=files_wildcard)
+ dir_listing = ( [(True, p) for p in dirs] +
+ [(False, p) for p in files] )
+- except Exception, e:
++ except Exception as e:
+ prefix = ''.join([(char_vertline + ' ', ' ')[last] for last in levels]) + ' '
+- write(wrap_prefix(prefix[:-1] + ' ') + wrap_error(u"unable to retrieve directory list (%s) ..." % str(e)))
++ write(wrap_prefix(prefix[:-1] + ' ') + wrap_error("unable to retrieve directory list (%s) ..." % str(e)))
+ return 0
+
+ if hide_dotfiles:
+ dir_listing = [(isdir, p) for isdir, p in dir_listing if not p.startswith('.')]
+
+ if dirs_first:
+- dir_listing.sort(key = lambda (isdir, p):(not isdir, p.lower()))
++ dir_listing.sort(key = lambda isdir_p:(not isdir_p[0], isdir_p[1].lower()))
+ else:
+- dir_listing.sort(key = lambda (isdir, p):p.lower())
++ dir_listing.sort(key = lambda isdir_p1:isdir_p1[1].lower())
+
+ for i, (is_dir, item) in enumerate(dir_listing):
+ if is_dir:
+@@ -685,9 +685,9 @@ if __name__ == "__main__":
+ t1.tree()
+
+ t2 = TempFS()
+- print t2.listdir()
++ print(t2.listdir())
+ movedir(t1, t2)
+
+- print t2.listdir()
++ print(t2.listdir())
+ t1.tree()
+ t2.tree()
+--- fs/watch.py.orig 2022-03-04 17:14:43 UTC
++++ fs/watch.py
+@@ -32,7 +32,7 @@ an iterator over the change events.
+ import sys
+ import weakref
+ import threading
+-import Queue
++import queue
+ import traceback
+
+ from fs.path import *
+@@ -54,10 +54,10 @@ class EVENT(object):
+ self.path = path
+
+ def __str__(self):
+- return unicode(self).encode("utf8")
++        return self.__unicode__()
+
+ def __unicode__(self):
+- return u"<fs.watch.%s object (path='%s') at %s>" % (self.__class__.__name__,self.path,hex(id(self)))
++ return "<fs.watch.%s object (path='%s') at %s>" % (self.__class__.__name__,self.path,hex(id(self)))
+
+ def clone(self,fs=None,path=None):
+ if fs is None:
+@@ -102,7 +102,7 @@ class MOVED_DST(EVENT):
+ self.source = source
+
+ def __unicode__(self):
+- return u"<fs.watch.%s object (path=%r,src=%r) at %s>" % (self.__class__.__name__,self.path,self.source,hex(id(self)))
++ return "<fs.watch.%s object (path=%r,src=%r) at %s>" % (self.__class__.__name__,self.path,self.source,hex(id(self)))
+
+ def clone(self,fs=None,path=None,source=None):
+ evt = super(MOVED_DST,self).clone(fs,path)
+@@ -120,7 +120,7 @@ class MOVED_SRC(EVENT):
+ self.destination = destination
+
+ def __unicode__(self):
+- return u"<fs.watch.%s object (path=%r,dst=%r) at %s>" % (self.__class__.__name__,self.path,self.destination,hex(id(self)))
++ return "<fs.watch.%s object (path=%r,dst=%r) at %s>" % (self.__class__.__name__,self.path,self.destination,hex(id(self)))
+
+ def clone(self,fs=None,path=None,destination=None):
+ evt = super(MOVED_SRC,self).clone(fs,path)
+@@ -182,7 +182,7 @@ class Watcher(object):
+ try:
+ self.callback(event)
+ except Exception:
+- print >>sys.stderr, "error in FS watcher callback", self.callback
++ print("error in FS watcher callback", self.callback, file=sys.stderr)
+ traceback.print_exc()
+
+
+@@ -213,7 +213,7 @@ class WatchableFSMixin(FS):
+ if isinstance(watcher_or_callback,Watcher):
+ self._watchers[watcher_or_callback.path].remove(watcher_or_callback)
+ else:
+- for watchers in self._watchers.itervalues():
++ for watchers in self._watchers.values():
+ for i,watcher in enumerate(watchers):
+ if watcher.callback is watcher_or_callback:
+ del watchers[i]
+@@ -221,7 +221,7 @@ class WatchableFSMixin(FS):
+
+ def _find_watchers(self,callback):
+ """Find watchers registered with the given callback."""
+- for watchers in self._watchers.itervalues():
++ for watchers in self._watchers.values():
+ for watcher in watchers:
+ if watcher.callback is callback:
+ yield watcher
+@@ -235,7 +235,7 @@ class WatchableFSMixin(FS):
+ if path is None:
+ path = event.path
+ if path is None:
+- for watchers in self._watchers.itervalues():
++ for watchers in self._watchers.values():
+ for watcher in watchers:
+ watcher.handle_event(event)
+ else:
+@@ -443,7 +443,7 @@ class WatchableFS(WatchableFSMixin,WrapFS):
+
+ def _post_move(self,src,dst,data):
+ (src_paths,dst_paths) = data
+- for src_path,isdir in sorted(src_paths.items(),reverse=True):
++ for src_path,isdir in sorted(list(src_paths.items()),reverse=True):
+ path = pathjoin(src,src_path)
+ self.notify_watchers(REMOVED,path)
+
+@@ -554,7 +554,7 @@ class PollingWatchableFS(WatchableFS):
+ else:
+ was_accessed = False
+ was_modified = False
+- for (k,v) in new_info.iteritems():
++ for (k,v) in new_info.items():
+ if k not in old_info:
+ was_modified = True
+ break
+@@ -612,7 +612,7 @@ class iter_changes(object):
+
+ def __init__(self,fs=None,path="/",events=None,**kwds):
+ self.closed = False
+- self._queue = Queue.Queue()
++ self._queue = queue.Queue()
+ self._watching = set()
+ if fs is not None:
+ self.add_watcher(fs,path,events,**kwds)
+@@ -628,7 +628,7 @@ class iter_changes(object):
+ raise StopIteration
+ try:
+ event = self._queue.get(timeout=timeout)
+- except Queue.Empty:
++ except queue.Empty:
+ raise StopIteration
+ if event is None:
+ raise StopIteration
+--- fs/wrapfs/__init__.py.orig 2015-04-12 17:24:29 UTC
++++ fs/wrapfs/__init__.py
+@@ -32,12 +32,12 @@ def rewrite_errors(func):
+ def wrapper(self,*args,**kwds):
+ try:
+ return func(self,*args,**kwds)
+- except ResourceError, e:
++ except ResourceError as e:
+ (exc_type,exc_inst,tb) = sys.exc_info()
+ try:
+ e.path = self._decode(e.path)
+ except (AttributeError, ValueError, TypeError):
+- raise e, None, tb
++ raise e.with_traceback(tb)
+ raise
+ return wrapper
+
+@@ -119,7 +119,7 @@ class WrapFS(FS):
+ return (mode, mode)
+
+ def __unicode__(self):
+- return u"<%s: %s>" % (self.__class__.__name__,self.wrapped_fs,)
++ return "<%s: %s>" % (self.__class__.__name__,self.wrapped_fs,)
+
+ #def __str__(self):
+ # return unicode(self).encode(sys.getdefaultencoding(),"replace")
+--- fs/wrapfs/debugfs.py.orig 2022-03-04 17:14:43 UTC
++++ fs/wrapfs/debugfs.py
+@@ -66,7 +66,7 @@ class DebugFS(object):
+ logger.log(level, message)
+
+ def __parse_param(self, value):
+- if isinstance(value, basestring):
++ if isinstance(value, str):
+ if len(value) > 60:
+ value = "%s ... (length %d)" % (repr(value[:60]), len(value))
+ else:
+@@ -75,7 +75,7 @@ class DebugFS(object):
+ value = "%s (%d items)" % (repr(value[:3]), len(value))
+ elif isinstance(value, dict):
+ items = {}
+- for k, v in value.items()[:3]:
++ for k, v in list(value.items())[:3]:
+ items[k] = v
+ value = "%s (%d items)" % (repr(items), len(value))
+ else:
+@@ -84,7 +84,7 @@ class DebugFS(object):
+
+ def __parse_args(self, *arguments, **kwargs):
+ args = [self.__parse_param(a) for a in arguments]
+- for k, v in kwargs.items():
++ for k, v in list(kwargs.items()):
+ args.append("%s=%s" % (k, self.__parse_param(v)))
+
+ args = ','.join(args)
+@@ -105,10 +105,10 @@ class DebugFS(object):
+
+ try:
+ attr = getattr(self.__wrapped_fs, key)
+- except AttributeError, e:
++ except AttributeError as e:
+ self.__log(DEBUG, "Asking for not implemented method %s" % key)
+ raise e
+- except Exception, e:
++ except Exception as e:
+ self.__log(CRITICAL, "Exception %s: %s" % \
+ (e.__class__.__name__, str(e)))
+ raise e
+@@ -122,19 +122,19 @@ class DebugFS(object):
+ try:
+ value = attr(*args, **kwargs)
+ self.__report("Call method", key, value, *args, **kwargs)
+- except FSError, e:
++ except FSError as e:
+ self.__log(ERROR, "Call method %s%s -> Exception %s: %s" % \
+ (key, self.__parse_args(*args, **kwargs), \
+ e.__class__.__name__, str(e)))
+ (exc_type,exc_inst,tb) = sys.exc_info()
+- raise e, None, tb
+- except Exception, e:
++ raise e.with_traceback(tb)
++ except Exception as e:
+ self.__log(CRITICAL,
+ "Call method %s%s -> Non-FS exception %s: %s" %\
+ (key, self.__parse_args(*args, **kwargs), \
+ e.__class__.__name__, str(e)))
+ (exc_type,exc_inst,tb) = sys.exc_info()
+- raise e, None, tb
++ raise e.with_traceback(tb)
+ return value
+
+ if self.__verbose:
+--- fs/wrapfs/hidedotfilesfs.py.orig 2022-03-04 17:14:43 UTC
++++ fs/wrapfs/hidedotfilesfs.py
+@@ -87,7 +87,7 @@ class HideDotFilesFS(WrapFS):
+ path = normpath(path)
+ iter_dir = iter(self.listdir(path,hidden=True))
+ try:
+- iter_dir.next()
++ next(iter_dir)
+ except StopIteration:
+ return True
+ return False
+--- fs/wrapfs/lazyfs.py.orig 2022-03-04 17:14:43 UTC
++++ fs/wrapfs/lazyfs.py
+@@ -39,14 +39,14 @@ class LazyFS(WrapFS):
+ # It appears that python2.5 has trouble printing out
+ # classes that define a __unicode__ method.
+ try:
+- return u"<LazyFS: %s>" % (self._fsclass,)
++ return "<LazyFS: %s>" % (self._fsclass,)
+ except TypeError:
+ try:
+- return u"<LazyFS: %s>" % (self._fsclass.__name__,)
++ return "<LazyFS: %s>" % (self._fsclass.__name__,)
+ except AttributeError:
+- return u"<LazyFS: <unprintable>>"
++ return "<LazyFS: <unprintable>>"
+ else:
+- return u"<LazyFS: %s>" % (wrapped_fs,)
++ return "<LazyFS: %s>" % (wrapped_fs,)
+
+ def __getstate__(self):
+ state = super(LazyFS,self).__getstate__()
+--- fs/wrapfs/limitsizefs.py.orig 2022-03-04 17:14:43 UTC
++++ fs/wrapfs/limitsizefs.py
+@@ -9,7 +9,7 @@ total size of files stored in the wrapped FS.
+
+ """
+
+-from __future__ import with_statement
++
+
+ from fs.errors import *
+ from fs.path import *
+--- fs/wrapfs/subfs.py.orig 2022-03-04 17:14:43 UTC
++++ fs/wrapfs/subfs.py
+@@ -34,7 +34,7 @@ class SubFS(WrapFS):
+ return '<SubFS: %s/%s>' % (self.wrapped_fs, self.sub_dir.lstrip('/'))
+
+ def __unicode__(self):
+- return u'<SubFS: %s/%s>' % (self.wrapped_fs, self.sub_dir.lstrip('/'))
++ return '<SubFS: %s/%s>' % (self.wrapped_fs, self.sub_dir.lstrip('/'))
+
+ def __repr__(self):
+ return "SubFS(%r, %r)" % (self.wrapped_fs, self.sub_dir)
+--- fs/xattrs.py.orig 2015-04-12 17:24:29 UTC
++++ fs/xattrs.py
+@@ -23,7 +23,7 @@ if it has native xattr support, and return a wrapped v
+
+ import sys
+ try:
+- import cPickle as pickle
++ import pickle as pickle
+ except ImportError:
+ import pickle
+
+@@ -104,7 +104,7 @@ class SimulateXAttr(WrapFS):
+ """Set an extended attribute on the given path."""
+ if not self.exists(path):
+ raise ResourceNotFoundError(path)
+- key = unicode(key)
++ key = str(key)
+ attrs = self._get_attr_dict(path)
+ attrs[key] = str(value)
+ self._set_attr_dict(path, attrs)
+@@ -133,7 +133,7 @@ class SimulateXAttr(WrapFS):
+ """List all the extended attribute keys set on the given path."""
+ if not self.exists(path):
+ raise ResourceNotFoundError(path)
+- return self._get_attr_dict(path).keys()
++ return list(self._get_attr_dict(path).keys())
+
+ def _encode(self,path):
+ """Prevent requests for operations on .xattr files."""
+@@ -189,7 +189,7 @@ class SimulateXAttr(WrapFS):
+ d_attr_file = self._get_attr_path(dst)
+ try:
+ self.wrapped_fs.copy(s_attr_file,d_attr_file,overwrite=True)
+- except ResourceNotFoundError,e:
++ except ResourceNotFoundError as e:
+ pass
+
+ def move(self,src,dst,**kwds):
+--- fs/zipfs.py.orig 2015-04-12 17:25:37 UTC
++++ fs/zipfs.py
+@@ -16,9 +16,9 @@ from fs.filelike import StringIO
+ from fs import iotools
+
+ from zipfile import ZipFile, ZIP_DEFLATED, ZIP_STORED, BadZipfile, LargeZipFile
+-from memoryfs import MemoryFS
++from .memoryfs import MemoryFS
+
+-import tempfs
++from . import tempfs
+
+ from six import PY3
+
+@@ -74,7 +74,7 @@ class _ExceptionProxy(object):
+ def __setattr__(self, name, value):
+ raise ValueError("Zip file has been closed")
+
+- def __nonzero__(self):
++ def __bool__(self):
+ return False
+
+
+@@ -117,7 +117,7 @@ class ZipFS(FS):
+ self.zip_mode = mode
+ self.encoding = encoding
+
+- if isinstance(zip_file, basestring):
++ if isinstance(zip_file, str):
+ zip_file = os.path.expanduser(os.path.expandvars(zip_file))
+ zip_file = os.path.normpath(os.path.abspath(zip_file))
+ self._zip_file_string = True
+@@ -126,10 +126,10 @@ class ZipFS(FS):
+
+ try:
+ self.zf = ZipFile(zip_file, mode, compression_type, allow_zip_64)
+- except BadZipfile, bzf:
++ except BadZipfile as bzf:
+ raise ZipOpenError("Not a zip file or corrupt (%s)" % str(zip_file),
+ details=bzf)
+- except IOError, ioe:
++ except IOError as ioe:
+ if str(ioe).startswith('[Errno 22] Invalid argument'):
+ raise ZipOpenError("Not a zip file or corrupt (%s)" % str(zip_file),
+ details=ioe)
+@@ -151,7 +151,7 @@ class ZipFS(FS):
+ return "<ZipFS: %s>" % self.zip_path
+
+ def __unicode__(self):
+- return u"<ZipFS: %s>" % self.zip_path
++ return "<ZipFS: %s>" % self.zip_path
+
+ def _decode_path(self, path):
+ if PY3:
+@@ -280,7 +280,7 @@ class ZipFS(FS):
+ try:
+ zi = self.zf.getinfo(self._encode_path(path))
+ zinfo = dict((attrib, getattr(zi, attrib)) for attrib in dir(zi) if not attrib.startswith('_'))
+- for k, v in zinfo.iteritems():
++ for k, v in zinfo.items():
+ if callable(v):
+ zinfo[k] = v()
+ except KeyError:
+--- setup.py.orig 2015-11-14 11:44:01 UTC
++++ setup.py
+@@ -38,8 +38,6 @@ with open('README.txt', 'r') as f:
+
+
+ extra = {}
+-if PY3:
+- extra["use_2to3"] = True
+
+ setup(install_requires=['setuptools', 'six'],
+ name='fs',
diff --git a/filesystems/py-fs/pkg-descr b/filesystems/py-fs/pkg-descr
new file mode 100644
index 000000000000..712dede347d6
--- /dev/null
+++ b/filesystems/py-fs/pkg-descr
@@ -0,0 +1,8 @@
+Pyfilesystem is a Python module that provides a simplified common interface to
+many types of filesystem. Filesystems exposed via Pyfilesystem can also be
+served over the network, or 'mounted' on the native filesystem.
+
+Pyfilesystem simplifies working with directories and paths, even if you only
+intend to work with local files. Differences in path formats between platforms
+abstracted away, and you can write code that sand-boxes any changes to a given
+directory.