diff --git a/awx/lib/site-packages/README b/awx/lib/site-packages/README index 0506812d4c..65613ba7e2 100644 --- a/awx/lib/site-packages/README +++ b/awx/lib/site-packages/README @@ -54,7 +54,7 @@ rackspace-novaclient==1.4 (no files) rax-default-network-flags-python-novaclient-ext==0.2.3 (rax_default_network_flags_python_novaclient_ext/*) rax-scheduled-images-python-novaclient-ext==0.2.1 (rax_scheduled_images_python_novaclient_ext/*) requests==2.3.0 (requests/*) -setuptools==2.2 (setuptools/*, _markerlib/*, pkg_resources.py, easy_install.py, excluded bin/easy_install*) +setuptools==12.0.5 (setuptools/*, _markerlib/*, pkg_resources/*, easy_install.py) simplejson==3.6.0 (simplejson/*, excluded simplejson/_speedups.so) six==1.7.3 (six.py) South==0.8.4 (south/*) diff --git a/awx/lib/site-packages/pkg_resources.py b/awx/lib/site-packages/pkg_resources/__init__.py similarity index 74% rename from awx/lib/site-packages/pkg_resources.py rename to awx/lib/site-packages/pkg_resources/__init__.py index bde30989fe..c0c095b253 100644 --- a/awx/lib/site-packages/pkg_resources.py +++ b/awx/lib/site-packages/pkg_resources/__init__.py @@ -14,8 +14,11 @@ The package resource API is designed to work with normal filesystem packages, method. """ +from __future__ import absolute_import + import sys import os +import io import time import re import imp @@ -29,30 +32,27 @@ import token import symbol import operator import platform +import collections +import plistlib +import email.parser +import tempfile from pkgutil import get_importer -try: - from urlparse import urlparse, urlunparse -except ImportError: +PY3 = sys.version_info > (3,) +PY2 = not PY3 + +if PY3: from urllib.parse import urlparse, urlunparse -try: - frozenset -except NameError: - from sets import ImmutableSet as frozenset -try: - basestring - next = lambda o: o.next() - from cStringIO import StringIO as BytesIO -except NameError: - basestring = str - from io import BytesIO - def execfile(fn, globs=None, locs=None): - if globs is None: - globs = globals() - if locs is None: - locs = globs - exec(compile(open(fn).read(), fn, 'exec'), globs, locs) +if PY2: + from urlparse import urlparse, urlunparse + +if PY3: + string_types = str, +else: + string_types = str, eval('unicode') + +iteritems = (lambda i: i.items()) if PY3 else lambda i: i.iteritems() # capture these to bypass sandboxing from os import utime @@ -77,23 +77,142 @@ try: except ImportError: pass -def _bypass_ensure_directory(name, mode=0x1FF): # 0777 - # Sandbox-bypassing version of ensure_directory() - if not WRITE_SUPPORT: - raise IOError('"os.mkdir" not supported on this platform.') - dirname, filename = split(name) - if dirname and filename and not isdir(dirname): - _bypass_ensure_directory(dirname) - mkdir(dirname, mode) +try: + import pkg_resources._vendor.packaging.version + import pkg_resources._vendor.packaging.specifiers + packaging = pkg_resources._vendor.packaging +except ImportError: + # fallback to naturally-installed version; allows system packagers to + # omit vendored packages. + import packaging.version + import packaging.specifiers + + +class PEP440Warning(RuntimeWarning): + """ + Used when there is an issue with a version or specifier not complying with + PEP 440. 
+ """ + + +class _SetuptoolsVersionMixin(object): + + def __hash__(self): + return super(_SetuptoolsVersionMixin, self).__hash__() + + def __lt__(self, other): + if isinstance(other, tuple): + return tuple(self) < other + else: + return super(_SetuptoolsVersionMixin, self).__lt__(other) + + def __le__(self, other): + if isinstance(other, tuple): + return tuple(self) <= other + else: + return super(_SetuptoolsVersionMixin, self).__le__(other) + + def __eq__(self, other): + if isinstance(other, tuple): + return tuple(self) == other + else: + return super(_SetuptoolsVersionMixin, self).__eq__(other) + + def __ge__(self, other): + if isinstance(other, tuple): + return tuple(self) >= other + else: + return super(_SetuptoolsVersionMixin, self).__ge__(other) + + def __gt__(self, other): + if isinstance(other, tuple): + return tuple(self) > other + else: + return super(_SetuptoolsVersionMixin, self).__gt__(other) + + def __ne__(self, other): + if isinstance(other, tuple): + return tuple(self) != other + else: + return super(_SetuptoolsVersionMixin, self).__ne__(other) + + def __getitem__(self, key): + return tuple(self)[key] + + def __iter__(self): + component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE) + replace = { + 'pre': 'c', + 'preview': 'c', + '-': 'final-', + 'rc': 'c', + 'dev': '@', + }.get + + def _parse_version_parts(s): + for part in component_re.split(s): + part = replace(part, part) + if not part or part == '.': + continue + if part[:1] in '0123456789': + # pad for numeric comparison + yield part.zfill(8) + else: + yield '*'+part + + # ensure that alpha/beta/candidate are before final + yield '*final' + + def old_parse_version(s): + parts = [] + for part in _parse_version_parts(s.lower()): + if part.startswith('*'): + # remove '-' before a prerelease tag + if part < '*final': + while parts and parts[-1] == '*final-': + parts.pop() + # remove trailing zeros from each series of numeric parts + while parts and parts[-1] == '00000000': + parts.pop() + parts.append(part) + return tuple(parts) + + # Warn for use of this function + warnings.warn( + "You have iterated over the result of " + "pkg_resources.parse_version. This is a legacy behavior which is " + "inconsistent with the new version class introduced in setuptools " + "8.0. 
That class should be used directly instead of attempting to " + "iterate over the result.", + RuntimeWarning, + stacklevel=1, + ) + + for part in old_parse_version(str(self)): + yield part + + +class SetuptoolsVersion(_SetuptoolsVersionMixin, packaging.version.Version): + pass + + +class SetuptoolsLegacyVersion(_SetuptoolsVersionMixin, + packaging.version.LegacyVersion): + pass + + +def parse_version(v): + try: + return SetuptoolsVersion(v) + except packaging.version.InvalidVersion: + return SetuptoolsLegacyVersion(v) _state_vars = {} def _declare_state(vartype, **kw): - g = globals() - for name, val in kw.items(): - g[name] = val - _state_vars[name] = vartype + globals().update(kw) + _state_vars.update(dict.fromkeys(kw, vartype)) def __getstate__(): state = {} @@ -143,13 +262,15 @@ def get_supported_platform(): try: plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3)) except ValueError: - pass # not Mac OS X + # not Mac OS X + pass return plat __all__ = [ # Basic resource access and distribution/entry point discovery 'require', 'run_script', 'get_provider', 'get_distribution', - 'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points', + 'load_entry_point', 'get_entry_map', 'get_entry_info', + 'iter_entry_points', 'resource_string', 'resource_stream', 'resource_filename', 'resource_listdir', 'resource_exists', 'resource_isdir', @@ -163,8 +284,11 @@ __all__ = [ 'Distribution', 'Requirement', 'EntryPoint', # Exceptions - 'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra', - 'ExtractionError', + 'ResolutionError', 'VersionConflict', 'DistributionNotFound', + 'UnknownExtra', 'ExtractionError', + + # Warnings + 'PEP440Warning', # Parsing functions and string utilities 'parse_requirements', 'parse_version', 'safe_name', 'safe_version', @@ -193,8 +317,51 @@ class ResolutionError(Exception): def __repr__(self): return self.__class__.__name__+repr(self.args) + class VersionConflict(ResolutionError): - """An already-installed version conflicts with the requested version""" + """ + An already-installed version conflicts with the requested version. + + Should be initialized with the installed Distribution and the requested + Requirement. + """ + + _template = "{self.dist} is installed but {self.req} is required" + + @property + def dist(self): + return self.args[0] + + @property + def req(self): + return self.args[1] + + def report(self): + return self._template.format(**locals()) + + def with_context(self, required_by): + """ + If required_by is non-empty, return a version of self that is a + ContextualVersionConflict. + """ + if not required_by: + return self + args = self.args + (required_by,) + return ContextualVersionConflict(*args) + + +class ContextualVersionConflict(VersionConflict): + """ + A VersionConflict that accepts a third parameter, the set of the + requirements that required the installed Distribution. 
+ """ + + _template = VersionConflict._template + ' by {self.required_by}' + + @property + def required_by(self): + return self.args[2] + class DistributionNotFound(ResolutionError): """A requested distribution was not found""" @@ -221,7 +388,7 @@ def register_loader_type(loader_type, provider_factory): def get_provider(moduleOrReq): """Return an IResourceProvider for the named module or requirement""" - if isinstance(moduleOrReq,Requirement): + if isinstance(moduleOrReq, Requirement): return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0] try: module = sys.modules[moduleOrReq] @@ -233,11 +400,9 @@ def get_provider(moduleOrReq): def _macosx_vers(_cache=[]): if not _cache: - import platform version = platform.mac_ver()[0] # fallback for MacPorts if version == '': - import plistlib plist = '/System/Library/CoreServices/SystemVersion.plist' if os.path.exists(plist): if hasattr(plistlib, 'readPlist'): @@ -249,7 +414,7 @@ def _macosx_vers(_cache=[]): return _cache[0] def _macosx_arch(machine): - return {'PowerPC':'ppc', 'Power_Macintosh':'ppc'}.get(machine,machine) + return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine) def get_build_platform(): """Return this platform's string for platform-specific distributions @@ -278,10 +443,11 @@ def get_build_platform(): macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)") darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)") -get_platform = get_build_platform # XXX backward compat +# XXX backward compat +get_platform = get_build_platform -def compatible_platforms(provided,required): +def compatible_platforms(provided, required): """Can code for the `provided` platform run on the `required` platform? Returns true if either platform is ``None``, or the platforms are equal. @@ -289,7 +455,8 @@ def compatible_platforms(provided,required): XXX Needs compatibility checks for Linux and other unixy OSes. """ if provided is None or required is None or provided==required: - return True # easy case + # easy case + return True # Mac OS X special cases reqMac = macosVersionString.match(required) @@ -307,13 +474,9 @@ def compatible_platforms(provided,required): macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) if dversion == 7 and macosversion >= "10.3" or \ dversion == 8 and macosversion >= "10.4": - - #import warnings - #warnings.warn("Mac eggs should be rebuilt to " - # "use the macosx designation instead of darwin.", - # category=DeprecationWarning) return True - return False # egg isn't macosx or legacy darwin + # egg isn't macosx or legacy darwin + return False # are they the same major version and machine type? 
if provMac.group(1) != reqMac.group(1) or \ @@ -338,13 +501,16 @@ def run_script(dist_spec, script_name): ns['__name__'] = name require(dist_spec)[0].run_script(script_name, ns) -run_main = run_script # backward compatibility +# backward compatibility +run_main = run_script def get_distribution(dist): """Return a current distribution object for a Requirement or string""" - if isinstance(dist,basestring): dist = Requirement.parse(dist) - if isinstance(dist,Requirement): dist = get_provider(dist) - if not isinstance(dist,Distribution): + if isinstance(dist, string_types): + dist = Requirement.parse(dist) + if isinstance(dist, Requirement): + dist = get_provider(dist) + if not isinstance(dist, Distribution): raise TypeError("Expected string, Requirement, or Distribution", dist) return dist @@ -429,6 +595,48 @@ class WorkingSet(object): for entry in entries: self.add_entry(entry) + @classmethod + def _build_master(cls): + """ + Prepare the master working set. + """ + ws = cls() + try: + from __main__ import __requires__ + except ImportError: + # The main program does not list any requirements + return ws + + # ensure the requirements are met + try: + ws.require(__requires__) + except VersionConflict: + return cls._build_from_requirements(__requires__) + + return ws + + @classmethod + def _build_from_requirements(cls, req_spec): + """ + Build a working set from a requirement spec. Rewrites sys.path. + """ + # try it without defaults already on sys.path + # by starting with an empty path + ws = cls([]) + reqs = parse_requirements(req_spec) + dists = ws.resolve(reqs, Environment()) + for dist in dists: + ws.add(dist) + + # add any missing entries from sys.path + for entry in sys.path: + if entry not in ws.entries: + ws.add_entry(entry) + + # then copy back to sys.path + sys.path[:] = ws.entries + return ws + def add_entry(self, entry): """Add a path item to ``.entries``, finding any distributions on it @@ -444,7 +652,7 @@ class WorkingSet(object): for dist in find_distributions(entry, True): self.add(dist, entry, False) - def __contains__(self,dist): + def __contains__(self, dist): """True if `dist` is the active distribution for its project""" return self.by_key.get(dist.key) == dist @@ -460,9 +668,9 @@ class WorkingSet(object): """ dist = self.by_key.get(req.key) if dist is not None and dist not in req: - raise VersionConflict(dist,req) # XXX add more info - else: - return dist + # XXX add more info + raise VersionConflict(dist, req) + return dist def iter_entry_points(self, group, name=None): """Yield entry point objects from `group` matching `name` @@ -524,7 +732,8 @@ class WorkingSet(object): keys = self.entry_keys.setdefault(entry,[]) keys2 = self.entry_keys.setdefault(dist.location,[]) if not replace and dist.key in self.by_key: - return # ignore hidden distros + # ignore hidden distros + return self.by_key[dist.key] = dist if dist.key not in keys: @@ -552,13 +761,21 @@ class WorkingSet(object): it. """ - requirements = list(requirements)[::-1] # set up the stack - processed = {} # set of processed requirements - best = {} # key -> dist + # set up the stack + requirements = list(requirements)[::-1] + # set of processed requirements + processed = {} + # key -> dist + best = {} to_activate = [] + # Mapping of requirement to set of distributions that required it; + # useful for reporting info about conflicts. 
+ required_by = collections.defaultdict(set) + while requirements: - req = requirements.pop(0) # process dependencies breadth-first + # process dependencies breadth-first + req = requirements.pop(0) if req in processed: # Ignore cyclic or redundant dependencies continue @@ -589,11 +806,21 @@ class WorkingSet(object): to_activate.append(dist) if dist not in req: # Oops, the "best" so far conflicts with a dependency - raise VersionConflict(dist,req) # XXX put more info here - requirements.extend(dist.requires(req.extras)[::-1]) + dependent_req = required_by[req] + raise VersionConflict(dist, req).with_context(dependent_req) + + # push the new requirements onto the stack + new_requirements = dist.requires(req.extras)[::-1] + requirements.extend(new_requirements) + + # Register the new requirements needed by req + for new_requirement in new_requirements: + required_by[new_requirement].add(req.project_name) + processed[req] = True - return to_activate # return list of distros to activate + # return list of distros to activate + return to_activate def find_plugins(self, plugin_env, full_env=None, installer=None, fallback=True): @@ -604,8 +831,10 @@ class WorkingSet(object): distributions, errors = working_set.find_plugins( Environment(plugin_dirlist) ) - map(working_set.add, distributions) # add plugins+libs to sys.path - print 'Could not load', errors # display errors + # add plugins+libs to sys.path + map(working_set.add, distributions) + # display errors + print('Could not load', errors) The `plugin_env` should be an ``Environment`` instance that contains only distributions that are in the project's "plugin directory" or @@ -630,7 +859,8 @@ class WorkingSet(object): """ plugin_projects = list(plugin_env) - plugin_projects.sort() # scan project names in alphabetic order + # scan project names in alphabetic order + plugin_projects.sort() error_info = {} distributions = {} @@ -642,7 +872,8 @@ class WorkingSet(object): env = full_env + plugin_env shadow_set = self.__class__([]) - list(map(shadow_set.add, self)) # put all our entries in shadow_set + # put all our entries in shadow_set + list(map(shadow_set.add, self)) for project_name in plugin_projects: @@ -653,13 +884,15 @@ class WorkingSet(object): try: resolvees = shadow_set.resolve(req, env, installer) - except ResolutionError: - v = sys.exc_info()[1] - error_info[dist] = v # save error info + except ResolutionError as v: + # save error info + error_info[dist] = v if fallback: - continue # try the next older version of project + # try the next older version of project + continue else: - break # give up on this project, keep going + # give up on this project, keep going + break else: list(map(shadow_set.add, resolvees)) @@ -718,7 +951,8 @@ class WorkingSet(object): class Environment(object): """Searchable snapshot of distributions on a search path""" - def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR): + def __init__(self, search_path=None, platform=get_supported_platform(), + python=PY_MAJOR): """Snapshot distributions available on a search path Any distributions found on `search_path` are added to the environment. @@ -736,7 +970,6 @@ class Environment(object): running platform or Python version. 
""" self._distmap = {} - self._cache = {} self.platform = platform self.python = python self.scan(search_path) @@ -750,7 +983,7 @@ class Environment(object): """ return (self.python is None or dist.py_version is None or dist.py_version==self.python) \ - and compatible_platforms(dist.platform,self.platform) + and compatible_platforms(dist.platform, self.platform) def remove(self, dist): """Remove `dist` from the environment""" @@ -771,30 +1004,25 @@ class Environment(object): for dist in find_distributions(item): self.add(dist) - def __getitem__(self,project_name): + def __getitem__(self, project_name): """Return a newest-to-oldest list of distributions for `project_name` + + Uses case-insensitive `project_name` comparison, assuming all the + project's distributions use their project's name converted to all + lowercase as their key. + """ - try: - return self._cache[project_name] - except KeyError: - project_name = project_name.lower() - if project_name not in self._distmap: - return [] + distribution_key = project_name.lower() + return self._distmap.get(distribution_key, []) - if project_name not in self._cache: - dists = self._cache[project_name] = self._distmap[project_name] - _sort_dists(dists) - - return self._cache[project_name] - - def add(self,dist): - """Add `dist` if we ``can_add()`` it and it isn't already added""" + def add(self, dist): + """Add `dist` if we ``can_add()`` it and it has not already been added + """ if self.can_add(dist) and dist.has_version(): - dists = self._distmap.setdefault(dist.key,[]) + dists = self._distmap.setdefault(dist.key, []) if dist not in dists: dists.append(dist) - if dist.key in self._cache: - _sort_dists(self._cache[dist.key]) + dists.sort(key=operator.attrgetter('hashcmp'), reverse=True) def best_match(self, req, working_set, installer=None): """Find distribution best matching `req` and usable on `working_set` @@ -815,7 +1043,8 @@ class Environment(object): for dist in self[req.key]: if dist in req: return dist - return self.obtain(req, installer) # try and download/install + # try to download/install + return self.obtain(req, installer) def obtain(self, requirement, installer=None): """Obtain a distribution matching `requirement` (e.g. via download) @@ -832,13 +1061,14 @@ class Environment(object): def __iter__(self): """Yield the unique project names of the available distributions""" for key in self._distmap.keys(): - if self[key]: yield key + if self[key]: + yield key def __iadd__(self, other): """In-place addition of a distribution or environment""" - if isinstance(other,Distribution): + if isinstance(other, Distribution): self.add(other) - elif isinstance(other,Environment): + elif isinstance(other, Environment): for project in other: for dist in other[project]: self.add(dist) @@ -854,7 +1084,8 @@ class Environment(object): return new -AvailableDistributions = Environment # XXX backward compatibility +# XXX backward compatibility +AvailableDistributions = Environment class ExtractionError(RuntimeError): @@ -1007,7 +1238,7 @@ variable to point to an accessible directory. if os.name == 'posix': # Make the resource executable - mode = ((os.stat(tempname).st_mode) | 0x16D) & 0xFFF # 0555, 07777 + mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777 os.chmod(tempname, mode) def set_extraction_path(self, path): @@ -1064,14 +1295,17 @@ def get_default_cache(): if os.name!='nt': return os.path.expanduser('~/.python-eggs') - app_data = 'Application Data' # XXX this may be locale-specific! + # XXX this may be locale-specific! 
+ app_data = 'Application Data' app_homes = [ - (('APPDATA',), None), # best option, should be locale-safe + # best option, should be locale-safe + (('APPDATA',), None), (('USERPROFILE',), app_data), (('HOMEDRIVE','HOMEPATH'), app_data), (('HOMEPATH',), app_data), (('HOME',), None), - (('WINDIR',), app_data), # 95/98/ME + # 95/98/ME + (('WINDIR',), app_data), ] for keys, subdir in app_homes: @@ -1083,7 +1317,7 @@ def get_default_cache(): break else: if subdir: - dirname = os.path.join(dirname,subdir) + dirname = os.path.join(dirname, subdir) return os.path.join(dirname, 'Python-Eggs') else: raise RuntimeError( @@ -1099,13 +1333,15 @@ def safe_name(name): def safe_version(version): - """Convert an arbitrary string to a standard version string - - Spaces become dots, and all other non-alphanumeric characters become - dashes, with runs of multiple dashes condensed to a single dash. """ - version = version.replace(' ','.') - return re.sub('[^A-Za-z0-9.]+', '-', version) + Convert an arbitrary string to a standard version string + """ + try: + # normalize the version + return str(packaging.version.Version(version)) + except packaging.version.InvalidVersion: + version = version.replace(' ','.') + return re.sub('[^A-Za-z0-9.]+', '-', version) def safe_extra(extra): @@ -1129,8 +1365,8 @@ class MarkerEvaluation(object): values = { 'os_name': lambda: os.name, 'sys_platform': lambda: sys.platform, - 'python_full_version': lambda: sys.version.split()[0], - 'python_version': lambda:'%s.%s' % (sys.version_info[0], sys.version_info[1]), + 'python_full_version': platform.python_version, + 'python_version': lambda: platform.python_version()[:3], 'platform_version': platform.version, 'platform_machine': platform.machine, 'python_implementation': platform.python_implementation, @@ -1144,16 +1380,18 @@ class MarkerEvaluation(object): """ try: cls.evaluate_marker(text) - except SyntaxError: - return cls.normalize_exception(sys.exc_info()[1]) + except SyntaxError as e: + return cls.normalize_exception(e) return False @staticmethod def normalize_exception(exc): """ - Given a SyntaxError from a marker evaluation, normalize the error message: + Given a SyntaxError from a marker evaluation, normalize the error + message: - Remove indications of filename and line number. - - Replace platform-specific error messages with standard error messages. + - Replace platform-specific error messages with standard error + messages. """ subs = { 'unexpected EOF while parsing': 'invalid syntax', @@ -1167,12 +1405,20 @@ class MarkerEvaluation(object): @classmethod def and_test(cls, nodelist): # MUST NOT short-circuit evaluation, or invalid syntax can be skipped! - return functools.reduce(operator.and_, [cls.interpret(nodelist[i]) for i in range(1,len(nodelist),2)]) + items = [ + cls.interpret(nodelist[i]) + for i in range(1, len(nodelist), 2) + ] + return functools.reduce(operator.and_, items) @classmethod def test(cls, nodelist): # MUST NOT short-circuit evaluation, or invalid syntax can be skipped! 
- return functools.reduce(operator.or_, [cls.interpret(nodelist[i]) for i in range(1,len(nodelist),2)]) + items = [ + cls.interpret(nodelist[i]) + for i in range(1, len(nodelist), 2) + ] + return functools.reduce(operator.or_, items) @classmethod def atom(cls, nodelist): @@ -1181,12 +1427,14 @@ class MarkerEvaluation(object): if nodelist[2][0] == token.RPAR: raise SyntaxError("Empty parentheses") return cls.interpret(nodelist[2]) - raise SyntaxError("Language feature not supported in environment markers") + msg = "Language feature not supported in environment markers" + raise SyntaxError(msg) @classmethod def comparison(cls, nodelist): - if len(nodelist)>4: - raise SyntaxError("Chained comparison not allowed in environment markers") + if len(nodelist) > 4: + msg = "Chained comparison not allowed in environment markers" + raise SyntaxError(msg) comp = nodelist[2][1] cop = comp[1] if comp[0] == token.NAME: @@ -1198,7 +1446,8 @@ class MarkerEvaluation(object): try: cop = cls.get_op(cop) except KeyError: - raise SyntaxError(repr(cop)+" operator not allowed in environment markers") + msg = repr(cop) + " operator not allowed in environment markers" + raise SyntaxError(msg) return cop(cls.evaluate(nodelist[1]), cls.evaluate(nodelist[3])) @classmethod @@ -1224,7 +1473,8 @@ class MarkerEvaluation(object): Return a boolean indicating the marker result in this environment. Raise SyntaxError if marker is invalid. - This implementation uses the 'parser' module, which is not implemented on + This implementation uses the 'parser' module, which is not implemented + on Jython and has been superseded by the 'ast' module in Python 2.6 and later. """ @@ -1246,8 +1496,7 @@ class MarkerEvaluation(object): env[new_key] = env.pop(key) try: result = _markerlib.interpret(text, env) - except NameError: - e = sys.exc_info()[1] + except NameError as e: raise SyntaxError(e.args[0]) return result @@ -1278,12 +1527,21 @@ class MarkerEvaluation(object): return op() if kind==token.STRING: s = nodelist[1] - if s[:1] not in "'\"" or s.startswith('"""') or s.startswith("'''") \ - or '\\' in s: + if not cls._safe_string(s): raise SyntaxError( "Only plain strings allowed in environment markers") return s[1:-1] - raise SyntaxError("Language feature not supported in environment markers") + msg = "Language feature not supported in environment markers" + raise SyntaxError(msg) + + @staticmethod + def _safe_string(cand): + return ( + cand[:1] in "'\"" and + not cand.startswith('"""') and + not cand.startswith("'''") and + '\\' not in cand + ) invalid_marker = MarkerEvaluation.is_invalid_marker evaluate_marker = MarkerEvaluation.evaluate_marker @@ -1303,7 +1561,7 @@ class NullProvider: return self._fn(self.module_path, resource_name) def get_resource_stream(self, manager, resource_name): - return BytesIO(self.get_resource_string(manager, resource_name)) + return io.BytesIO(self.get_resource_string(manager, resource_name)) def get_resource_string(self, manager, resource_name): return self._get(self._fn(self.module_path, resource_name)) @@ -1312,52 +1570,54 @@ class NullProvider: return self._has(self._fn(self.module_path, resource_name)) def has_metadata(self, name): - return self.egg_info and self._has(self._fn(self.egg_info,name)) + return self.egg_info and self._has(self._fn(self.egg_info, name)) if sys.version_info <= (3,): def get_metadata(self, name): if not self.egg_info: return "" - return self._get(self._fn(self.egg_info,name)) + return self._get(self._fn(self.egg_info, name)) else: def get_metadata(self, name): if not 
self.egg_info: return "" - return self._get(self._fn(self.egg_info,name)).decode("utf-8") + return self._get(self._fn(self.egg_info, name)).decode("utf-8") def get_metadata_lines(self, name): return yield_lines(self.get_metadata(name)) - def resource_isdir(self,resource_name): + def resource_isdir(self, resource_name): return self._isdir(self._fn(self.module_path, resource_name)) - def metadata_isdir(self,name): - return self.egg_info and self._isdir(self._fn(self.egg_info,name)) + def metadata_isdir(self, name): + return self.egg_info and self._isdir(self._fn(self.egg_info, name)) - def resource_listdir(self,resource_name): - return self._listdir(self._fn(self.module_path,resource_name)) + def resource_listdir(self, resource_name): + return self._listdir(self._fn(self.module_path, resource_name)) - def metadata_listdir(self,name): + def metadata_listdir(self, name): if self.egg_info: - return self._listdir(self._fn(self.egg_info,name)) + return self._listdir(self._fn(self.egg_info, name)) return [] - def run_script(self,script_name,namespace): + def run_script(self, script_name, namespace): script = 'scripts/'+script_name if not self.has_metadata(script): raise ResolutionError("No script named %r" % script_name) - script_text = self.get_metadata(script).replace('\r\n','\n') - script_text = script_text.replace('\r','\n') - script_filename = self._fn(self.egg_info,script) + script_text = self.get_metadata(script).replace('\r\n', '\n') + script_text = script_text.replace('\r', '\n') + script_filename = self._fn(self.egg_info, script) namespace['__file__'] = script_filename if os.path.exists(script_filename): - execfile(script_filename, namespace, namespace) + source = open(script_filename).read() + code = compile(source, script_filename, 'exec') + exec(code, namespace, namespace) else: from linecache import cache cache[script_filename] = ( len(script_text), 0, script_text.split('\n'), script_filename ) - script_code = compile(script_text,script_filename,'exec') + script_code = compile(script_text, script_filename,'exec') exec(script_code, namespace, namespace) def _has(self, path): @@ -1393,8 +1653,8 @@ register_loader_type(object, NullProvider) class EggProvider(NullProvider): """Provider based on a virtual filesystem""" - def __init__(self,module): - NullProvider.__init__(self,module) + def __init__(self, module): + NullProvider.__init__(self, module) self._setup_prefix() def _setup_prefix(self): @@ -1417,21 +1677,18 @@ class DefaultProvider(EggProvider): def _has(self, path): return os.path.exists(path) - def _isdir(self,path): + def _isdir(self, path): return os.path.isdir(path) - def _listdir(self,path): + def _listdir(self, path): return os.listdir(path) def get_resource_stream(self, manager, resource_name): return open(self._fn(self.module_path, resource_name), 'rb') def _get(self, path): - stream = open(path, 'rb') - try: + with open(path, 'rb') as stream: return stream.read() - finally: - stream.close() register_loader_type(type(None), DefaultProvider) @@ -1442,9 +1699,9 @@ if importlib_bootstrap is not None: class EmptyProvider(NullProvider): """Provider that returns nothing for all requests""" - _isdir = _has = lambda self,path: False - _get = lambda self,path: '' - _listdir = lambda self,path: [] + _isdir = _has = lambda self, path: False + _get = lambda self, path: '' + _listdir = lambda self, path: [] module_path = None def __init__(self): @@ -1453,47 +1710,81 @@ class EmptyProvider(NullProvider): empty_provider = EmptyProvider() -def build_zipmanifest(path): +class 
ZipManifests(dict): + """ + zip manifest builder """ - This builds a similar dictionary to the zipimport directory - caches. However instead of tuples, ZipInfo objects are stored. - The translation of the tuple is as follows: - * [0] - zipinfo.filename on stock pythons this needs "/" --> os.sep - on pypy it is the same (one reason why distribute did work - in some cases on pypy and win32). - * [1] - zipinfo.compress_type - * [2] - zipinfo.compress_size - * [3] - zipinfo.file_size - * [4] - len(utf-8 encoding of filename) if zipinfo & 0x800 - len(ascii encoding of filename) otherwise - * [5] - (zipinfo.date_time[0] - 1980) << 9 | - zipinfo.date_time[1] << 5 | zipinfo.date_time[2] - * [6] - (zipinfo.date_time[3] - 1980) << 11 | - zipinfo.date_time[4] << 5 | (zipinfo.date_time[5] // 2) - * [7] - zipinfo.CRC + @classmethod + def build(cls, path): + """ + Build a dictionary similar to the zipimport directory + caches, except instead of tuples, store ZipInfo objects. + + Use a platform-specific path separator (os.sep) for the path keys + for compatibility with pypy on Windows. + """ + with ContextualZipFile(path) as zfile: + items = ( + ( + name.replace('/', os.sep), + zfile.getinfo(name), + ) + for name in zfile.namelist() + ) + return dict(items) + + load = build + + +class MemoizedZipManifests(ZipManifests): """ - zipinfo = dict() - zfile = zipfile.ZipFile(path) - #Got ZipFile has not __exit__ on python 3.1 - try: - for zitem in zfile.namelist(): - zpath = zitem.replace('/', os.sep) - zipinfo[zpath] = zfile.getinfo(zitem) - assert zipinfo[zpath] is not None - finally: - zfile.close() - return zipinfo + Memoized zipfile manifests. + """ + manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime') + + def load(self, path): + """ + Load a manifest at path or return a suitable manifest already loaded. + """ + path = os.path.normpath(path) + mtime = os.stat(path).st_mtime + + if path not in self or self[path].mtime != mtime: + manifest = self.build(path) + self[path] = self.manifest_mod(manifest, mtime) + + return self[path].manifest + + +class ContextualZipFile(zipfile.ZipFile): + """ + Supplement ZipFile class to support context manager for Python 2.6 + """ + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + self.close() + + def __new__(cls, *args, **kwargs): + """ + Construct a ZipFile or ContextualZipFile as appropriate + """ + if hasattr(zipfile.ZipFile, '__exit__'): + return zipfile.ZipFile(*args, **kwargs) + return super(ContextualZipFile, cls).__new__(cls) class ZipProvider(EggProvider): """Resource support for zips and eggs""" eagers = None + _zip_manifests = MemoizedZipManifests() def __init__(self, module): - EggProvider.__init__(self,module) - self.zipinfo = build_zipmanifest(self.loader.archive) + EggProvider.__init__(self, module) self.zip_pre = self.loader.archive+os.sep def _zipinfo_name(self, fspath): @@ -1502,18 +1793,23 @@ class ZipProvider(EggProvider): if fspath.startswith(self.zip_pre): return fspath[len(self.zip_pre):] raise AssertionError( - "%s is not a subpath of %s" % (fspath,self.zip_pre) + "%s is not a subpath of %s" % (fspath, self.zip_pre) ) - def _parts(self,zip_path): - # Convert a zipfile subpath into an egg-relative path part list - fspath = self.zip_pre+zip_path # pseudo-fs path + def _parts(self, zip_path): + # Convert a zipfile subpath into an egg-relative path part list. 
+ # pseudo-fs path + fspath = self.zip_pre+zip_path if fspath.startswith(self.egg_root+os.sep): return fspath[len(self.egg_root)+1:].split(os.sep) raise AssertionError( - "%s is not a subpath of %s" % (fspath,self.egg_root) + "%s is not a subpath of %s" % (fspath, self.egg_root) ) + @property + def zipinfo(self): + return self._zip_manifests.load(self.loader.archive) + def get_resource_filename(self, manager, resource_name): if not self.egg_name: raise NotImplementedError( @@ -1530,8 +1826,9 @@ class ZipProvider(EggProvider): @staticmethod def _get_date_and_size(zip_stat): size = zip_stat.file_size - date_time = zip_stat.date_time + (0, 0, -1) # ymdhms+wday, yday, dst - #1980 offset already done + # ymdhms+wday, yday, dst + date_time = zip_stat.date_time + (0, 0, -1) + # 1980 offset already done timestamp = time.mktime(date_time) return timestamp, size @@ -1542,7 +1839,8 @@ class ZipProvider(EggProvider): last = self._extract_resource( manager, os.path.join(zip_path, name) ) - return os.path.dirname(last) # return the extracted directory name + # return the extracted directory name + return os.path.dirname(last) timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) @@ -1561,7 +1859,7 @@ class ZipProvider(EggProvider): outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path)) os.write(outf, self.loader.get_data(zip_path)) os.close(outf) - utime(tmpnam, (timestamp,timestamp)) + utime(tmpnam, (timestamp, timestamp)) manager.postprocess(tmpnam, real_path) try: @@ -1573,14 +1871,16 @@ class ZipProvider(EggProvider): # the file became current since it was checked above, # so proceed. return real_path - elif os.name=='nt': # Windows, del old file and retry + # Windows, del old file and retry + elif os.name=='nt': unlink(real_path) rename(tmpnam, real_path) return real_path raise except os.error: - manager.extraction_error() # report a user-friendly error + # report a user-friendly error + manager.extraction_error() return real_path @@ -1596,9 +1896,8 @@ class ZipProvider(EggProvider): return False # check that the contents match zip_contents = self.loader.get_data(zip_path) - f = open(file_path, 'rb') - file_contents = f.read() - f.close() + with open(file_path, 'rb') as f: + file_contents = f.read() return zip_contents == file_contents def _get_eager_resources(self): @@ -1631,17 +1930,17 @@ class ZipProvider(EggProvider): zip_path = self._zipinfo_name(fspath) return zip_path in self.zipinfo or zip_path in self._index() - def _isdir(self,fspath): + def _isdir(self, fspath): return self._zipinfo_name(fspath) in self._index() - def _listdir(self,fspath): + def _listdir(self, fspath): return list(self._index().get(self._zipinfo_name(fspath), ())) - def _eager_to_zip(self,resource_name): - return self._zipinfo_name(self._fn(self.egg_root,resource_name)) + def _eager_to_zip(self, resource_name): + return self._zipinfo_name(self._fn(self.egg_root, resource_name)) - def _resource_to_zip(self,resource_name): - return self._zipinfo_name(self._fn(self.module_path,resource_name)) + def _resource_to_zip(self, resource_name): + return self._zipinfo_name(self._fn(self.module_path, resource_name)) register_loader_type(zipimport.zipimporter, ZipProvider) @@ -1658,21 +1957,20 @@ class FileMetadata(EmptyProvider): the provided location. 
""" - def __init__(self,path): + def __init__(self, path): self.path = path - def has_metadata(self,name): + def has_metadata(self, name): return name=='PKG-INFO' - def get_metadata(self,name): + def get_metadata(self, name): if name=='PKG-INFO': - f = open(self.path,'rU') - metadata = f.read() - f.close() + with open(self.path,'rU') as f: + metadata = f.read() return metadata raise KeyError("No metadata except PKG-INFO is available") - def get_metadata_lines(self,name): + def get_metadata_lines(self, name): return yield_lines(self.get_metadata(name)) @@ -1687,7 +1985,7 @@ class PathMetadata(DefaultProvider): base_dir = os.path.dirname(egg_info) metadata = PathMetadata(base_dir, egg_info) dist_name = os.path.splitext(os.path.basename(egg_info))[0] - dist = Distribution(basedir,project_name=dist_name,metadata=metadata) + dist = Distribution(basedir, project_name=dist_name, metadata=metadata) # Unpacked egg directories: @@ -1707,7 +2005,6 @@ class EggMetadata(ZipProvider): def __init__(self, importer): """Create a metadata provider from a zipimporter""" - self.zipinfo = build_zipmanifest(importer.archive) self.zip_pre = importer.archive+os.sep self.loader = importer if importer.prefix: @@ -1746,7 +2043,8 @@ def find_eggs_in_zip(importer, path_item, only=False): if metadata.has_metadata('PKG-INFO'): yield Distribution.from_filename(path_item, metadata=metadata) if only: - return # don't yield nested distros + # don't yield nested distros + return for subitem in metadata.resource_listdir('/'): if subitem.endswith('.egg'): subpath = os.path.join(path_item, subitem) @@ -1757,7 +2055,7 @@ register_finder(zipimport.zipimporter, find_eggs_in_zip) def find_nothing(importer, path_item, only=False): return () -register_finder(object,find_nothing) +register_finder(object, find_nothing) def find_on_path(importer, path_item, only=False): """Yield distributions accessible on a sys.path directory""" @@ -1783,23 +2081,24 @@ def find_on_path(importer, path_item, only=False): else: metadata = FileMetadata(fullpath) yield Distribution.from_location( - path_item,entry,metadata,precedence=DEVELOP_DIST + path_item, entry, metadata, precedence=DEVELOP_DIST ) elif not only and lower.endswith('.egg'): - for dist in find_distributions(os.path.join(path_item, entry)): + dists = find_distributions(os.path.join(path_item, entry)) + for dist in dists: yield dist elif not only and lower.endswith('.egg-link'): - entry_file = open(os.path.join(path_item, entry)) - try: + with open(os.path.join(path_item, entry)) as entry_file: entry_lines = entry_file.readlines() - finally: - entry_file.close() for line in entry_lines: - if not line.strip(): continue - for item in find_distributions(os.path.join(path_item,line.rstrip())): + if not line.strip(): + continue + path = os.path.join(path_item, line.rstrip()) + dists = find_distributions(path) + for item in dists: yield item break -register_finder(pkgutil.ImpImporter,find_on_path) +register_finder(pkgutil.ImpImporter, find_on_path) if importlib_bootstrap is not None: register_finder(importlib_bootstrap.FileFinder, find_on_path) @@ -1814,7 +2113,7 @@ def register_namespace_handler(importer_type, namespace_handler): `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item handler), and `namespace_handler` is a callable like this:: - def namespace_handler(importer,path_entry,moduleName,module): + def namespace_handler(importer, path_entry, moduleName, module): # return a path_entry to use for child packages Namespace handlers are only called if the importer object 
has already @@ -1890,7 +2189,8 @@ def fixup_namespace_packages(path_item, parent=None): try: for package in _namespace_packages.get(parent,()): subpath = _handle_ns(package, path_item) - if subpath: fixup_namespace_packages(subpath,package) + if subpath: + fixup_namespace_packages(subpath, package) finally: imp.release_lock() @@ -1906,8 +2206,8 @@ def file_ns_handler(importer, path_item, packageName, module): # Only return the path if it's not already there return subpath -register_namespace_handler(pkgutil.ImpImporter,file_ns_handler) -register_namespace_handler(zipimport.zipimporter,file_ns_handler) +register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) +register_namespace_handler(zipimport.zipimporter, file_ns_handler) if importlib_bootstrap is not None: register_namespace_handler(importlib_bootstrap.FileFinder, file_ns_handler) @@ -1916,14 +2216,14 @@ if importlib_bootstrap is not None: def null_ns_handler(importer, path_item, packageName, module): return None -register_namespace_handler(object,null_ns_handler) +register_namespace_handler(object, null_ns_handler) def normalize_path(filename): """Normalize a file/dir name for comparison purposes""" return os.path.normcase(os.path.realpath(filename)) -def _normalize_cached(filename,_cache={}): +def _normalize_cached(filename, _cache={}): try: return _cache[filename] except KeyError: @@ -1939,22 +2239,28 @@ def _set_parent_ns(packageName): def yield_lines(strs): - """Yield non-empty/non-comment lines of a ``basestring`` or sequence""" - if isinstance(strs,basestring): + """Yield non-empty/non-comment lines of a string or sequence""" + if isinstance(strs, string_types): for s in strs.splitlines(): s = s.strip() - if s and not s.startswith('#'): # skip blank lines/comments + # skip blank lines/comments + if s and not s.startswith('#'): yield s else: for ss in strs: for s in yield_lines(ss): yield s -LINE_END = re.compile(r"\s*(#.*)?$").match # whitespace and comment -CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match # line continuation -DISTRO = re.compile(r"\s*((\w|[-.])+)").match # Distribution or extra -VERSION = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match # ver. info -COMMA = re.compile(r"\s*,").match # comma between items +# whitespace and comment +LINE_END = re.compile(r"\s*(#.*)?$").match +# line continuation +CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match +# Distribution or extra +DISTRO = re.compile(r"\s*((\w|[-.])+)").match +# ver. 
info +VERSION = re.compile(r"\s*(<=?|>=?|===?|!=|~=)\s*((\w|[-.*_!+])+)").match +# comma between items +COMMA = re.compile(r"\s*,").match OBRACKET = re.compile(r"\s*\[").match CBRACKET = re.compile(r"\s*\]").match MODULE = re.compile(r"\w+(\.\w+)*$").match @@ -1964,62 +2270,7 @@ EGG_NAME = re.compile( re.VERBOSE | re.IGNORECASE ).match -component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE) -replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get -def _parse_version_parts(s): - for part in component_re.split(s): - part = replace(part,part) - if not part or part=='.': - continue - if part[:1] in '0123456789': - yield part.zfill(8) # pad for numeric comparison - else: - yield '*'+part - - yield '*final' # ensure that alpha/beta/candidate are before final - -def parse_version(s): - """Convert a version string to a chronologically-sortable key - - This is a rough cross between distutils' StrictVersion and LooseVersion; - if you give it versions that would work with StrictVersion, then it behaves - the same; otherwise it acts like a slightly-smarter LooseVersion. It is - *possible* to create pathological version coding schemes that will fool - this parser, but they should be very rare in practice. - - The returned value will be a tuple of strings. Numeric portions of the - version are padded to 8 digits so they will compare numerically, but - without relying on how numbers compare relative to strings. Dots are - dropped, but dashes are retained. Trailing zeros between alpha segments - or dashes are suppressed, so that e.g. "2.4.0" is considered the same as - "2.4". Alphanumeric parts are lower-cased. - - The algorithm assumes that strings like "-" and any alpha string that - alphabetically follows "final" represents a "patch level". So, "2.4-1" - is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is - considered newer than "2.4-1", which in turn is newer than "2.4". - - Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that - come before "final" alphabetically) are assumed to be pre-release versions, - so that the version "2.4" is considered newer than "2.4a1". - - Finally, to handle miscellaneous cases, the strings "pre", "preview", and - "rc" are treated as if they were "c", i.e. as though they were release - candidates, and therefore are not as new as a version string that does not - contain them, and "dev" is replaced with an '@' so that it sorts lower than - than any other pre-release tag. - """ - parts = [] - for part in _parse_version_parts(s.lower()): - if part.startswith('*'): - if part<'*final': # remove '-' before a prerelease tag - while parts and parts[-1]=='*final-': parts.pop() - # remove trailing zeros from each series of numeric parts - while parts and parts[-1]=='00000000': - parts.pop() - parts.append(part) - return tuple(parts) class EntryPoint(object): """Object representing an advertised importable object""" @@ -2043,21 +2294,46 @@ class EntryPoint(object): def __repr__(self): return "EntryPoint.parse(%r)" % str(self) - def load(self, require=True, env=None, installer=None): - if require: self.require(env, installer) - entry = __import__(self.module_name, globals(),globals(), ['__name__']) - for attr in self.attrs: - try: - entry = getattr(entry,attr) - except AttributeError: - raise ImportError("%r has no %r attribute" % (entry,attr)) - return entry + def load(self, require=True, *args, **kwargs): + """ + Require packages for this EntryPoint, then resolve it. 
+ """ + if not require or args or kwargs: + warnings.warn( + "Parameters to load are deprecated. Call .resolve and " + ".require separately.", + DeprecationWarning, + stacklevel=2, + ) + if require: + self.require(*args, **kwargs) + return self.resolve() + + def resolve(self): + """ + Resolve the entry point from its module and attrs. + """ + module = __import__(self.module_name, fromlist=['__name__'], level=0) + try: + return functools.reduce(getattr, self.attrs, module) + except AttributeError as exc: + raise ImportError(str(exc)) def require(self, env=None, installer=None): if self.extras and not self.dist: raise UnknownExtra("Can't require() without a distribution", self) - list(map(working_set.add, - working_set.resolve(self.dist.requires(self.extras),env,installer))) + reqs = self.dist.requires(self.extras) + items = working_set.resolve(reqs, env, installer) + list(map(working_set.add, items)) + + pattern = re.compile( + r'\s*' + r'(?P.+?)\s*' + r'=\s*' + r'(?P[\w.]+)\s*' + r'(:\s*(?P[\w.]+))?\s*' + r'(?P\[.*\])?\s*$' + ) @classmethod def parse(cls, src, dist=None): @@ -2065,31 +2341,28 @@ class EntryPoint(object): Entry point syntax follows the form:: - name = some.module:some.attr [extra1,extra2] + name = some.module:some.attr [extra1, extra2] The entry name and module name are required, but the ``:attrs`` and ``[extras]`` parts are optional """ - try: - attrs = extras = () - name,value = src.split('=',1) - if '[' in value: - value,extras = value.split('[',1) - req = Requirement.parse("x["+extras) - if req.specs: raise ValueError - extras = req.extras - if ':' in value: - value,attrs = value.split(':',1) - if not MODULE(attrs.rstrip()): - raise ValueError - attrs = attrs.rstrip().split('.') - except ValueError: - raise ValueError( - "EntryPoint must be in 'name=module:attrs [extras]' format", - src - ) - else: - return cls(name.strip(), value.strip(), attrs, extras, dist) + m = cls.pattern.match(src) + if not m: + msg = "EntryPoint must be in 'name=module:attrs [extras]' format" + raise ValueError(msg, src) + res = m.groupdict() + extras = cls._parse_extras(res['extras']) + attrs = res['attr'].split('.') if res['attr'] else () + return cls(res['name'], res['module'], attrs, extras, dist) + + @classmethod + def _parse_extras(cls, extras_spec): + if not extras_spec: + return () + req = Requirement.parse('x' + extras_spec) + if req.specs: + raise ValueError() + return req.extras @classmethod def parse_group(cls, group, lines, dist=None): @@ -2107,7 +2380,7 @@ class EntryPoint(object): @classmethod def parse_map(cls, data, dist=None): """Parse a map of entry point groups""" - if isinstance(data,dict): + if isinstance(data, dict): data = data.items() else: data = split_sections(data) @@ -2150,7 +2423,7 @@ class Distribution(object): self._provider = metadata or empty_provider @classmethod - def from_location(cls,location,basename,metadata=None,**kw): + def from_location(cls, location, basename, metadata=None,**kw): project_name, version, py_version, platform = [None]*4 basename, ext = os.path.splitext(basename) if ext.lower() in _distributionImpl: @@ -2166,30 +2439,38 @@ class Distribution(object): py_version=py_version, platform=platform, **kw ) - hashcmp = property( - lambda self: ( - getattr(self,'parsed_version',()), + @property + def hashcmp(self): + return ( + self.parsed_version, self.precedence, self.key, _remove_md5_fragment(self.location), self.py_version, - self.platform + self.platform, ) - ) - def __hash__(self): return hash(self.hashcmp) + + def __hash__(self): + return 
hash(self.hashcmp) + def __lt__(self, other): return self.hashcmp < other.hashcmp + def __le__(self, other): return self.hashcmp <= other.hashcmp + def __gt__(self, other): return self.hashcmp > other.hashcmp + def __ge__(self, other): return self.hashcmp >= other.hashcmp + def __eq__(self, other): if not isinstance(other, self.__class__): # It's not a Distribution, so they are not equal return False return self.hashcmp == other.hashcmp + def __ne__(self, other): return not self == other @@ -2207,11 +2488,29 @@ class Distribution(object): @property def parsed_version(self): - try: - return self._parsed_version - except AttributeError: - self._parsed_version = pv = parse_version(self.version) - return pv + if not hasattr(self, "_parsed_version"): + self._parsed_version = parse_version(self.version) + if isinstance( + self._parsed_version, packaging.version.LegacyVersion): + # While an empty version is techincally a legacy version and + # is not a valid PEP 440 version, it's also unlikely to + # actually come from someone and instead it is more likely that + # it comes from setuptools attempting to parse a filename and + # including it in the list. So for that we'll gate this warning + # on if the version is anything at all or not. + if self.version: + warnings.warn( + "'%s (%s)' is being parsed as a legacy, non PEP 440, " + "version. You may find odd behavior and sort order. " + "In particular it will be sorted as less than 0.0. It " + "is recommend to migrate to PEP 440 compatible " + "versions." % ( + self.project_name, self.version, + ), + PEP440Warning, + ) + + return self._parsed_version @property def version(self): @@ -2223,9 +2522,8 @@ class Distribution(object): self._version = safe_version(line.split(':',1)[1].strip()) return self._version else: - raise ValueError( - "Missing 'Version:' header and/or %s file" % self.PKG_INFO, self - ) + tmpl = "Missing 'Version:' header and/or %s file" + raise ValueError(tmpl % self.PKG_INFO, self) @property def _dep_map(self): @@ -2234,23 +2532,24 @@ class Distribution(object): except AttributeError: dm = self.__dep_map = {None: []} for name in 'requires.txt', 'depends.txt': - for extra,reqs in split_sections(self._get_metadata(name)): + for extra, reqs in split_sections(self._get_metadata(name)): if extra: if ':' in extra: - extra, marker = extra.split(':',1) + extra, marker = extra.split(':', 1) if invalid_marker(marker): - reqs=[] # XXX warn + # XXX warn + reqs=[] elif not evaluate_marker(marker): reqs=[] extra = safe_extra(extra) or None dm.setdefault(extra,[]).extend(parse_requirements(reqs)) return dm - def requires(self,extras=()): + def requires(self, extras=()): """List of Requirements needed for this distro if `extras` are used""" dm = self._dep_map deps = [] - deps.extend(dm.get(None,())) + deps.extend(dm.get(None, ())) for ext in extras: try: deps.extend(dm[safe_extra(ext)]) @@ -2260,18 +2559,21 @@ class Distribution(object): ) return deps - def _get_metadata(self,name): + def _get_metadata(self, name): if self.has_metadata(name): for line in self.get_metadata_lines(name): yield line - def activate(self,path=None): + def activate(self, path=None): """Ensure distribution is importable on `path` (default=sys.path)""" - if path is None: path = sys.path + if path is None: + path = sys.path self.insert_on(path) if path is sys.path: fixup_namespace_packages(self.location) - list(map(declare_namespace, self._get_metadata('namespace_packages.txt'))) + for pkg in self._get_metadata('namespace_packages.txt'): + if pkg in sys.modules: + 
declare_namespace(pkg) def egg_name(self): """Return what this distribution's standard .egg filename should be""" @@ -2281,29 +2583,31 @@ class Distribution(object): ) if self.platform: - filename += '-'+self.platform + filename += '-' + self.platform return filename def __repr__(self): if self.location: - return "%s (%s)" % (self,self.location) + return "%s (%s)" % (self, self.location) else: return str(self) def __str__(self): - try: version = getattr(self,'version',None) - except ValueError: version = None + try: + version = getattr(self, 'version', None) + except ValueError: + version = None version = version or "[unknown version]" - return "%s %s" % (self.project_name,version) + return "%s %s" % (self.project_name, version) - def __getattr__(self,attr): + def __getattr__(self, attr): """Delegate all unrecognized public attributes to .metadata provider""" if attr.startswith('_'): raise AttributeError(attr) return getattr(self._provider, attr) @classmethod - def from_filename(cls,filename,metadata=None, **kw): + def from_filename(cls, filename, metadata=None, **kw): return cls.from_location( _normalize_cached(filename), os.path.basename(filename), metadata, **kw @@ -2311,13 +2615,18 @@ class Distribution(object): def as_requirement(self): """Return a ``Requirement`` that matches this distribution exactly""" - return Requirement.parse('%s==%s' % (self.project_name, self.version)) + if isinstance(self.parsed_version, packaging.version.Version): + spec = "%s==%s" % (self.project_name, self.parsed_version) + else: + spec = "%s===%s" % (self.project_name, self.parsed_version) + + return Requirement.parse(spec) def load_entry_point(self, group, name): """Return the `name` entry point of `group` or raise ImportError""" - ep = self.get_entry_info(group,name) + ep = self.get_entry_info(group, name) if ep is None: - raise ImportError("Entry point %r not found" % ((group,name),)) + raise ImportError("Entry point %r not found" % ((group, name),)) return ep.load() def get_entry_map(self, group=None): @@ -2348,9 +2657,9 @@ class Distribution(object): npath= [(p and _normalize_cached(p) or p) for p in path] for p, item in enumerate(npath): - if item==nloc: + if item == nloc: break - elif item==bdir and self.precedence==EGG_DIST: + elif item == bdir and self.precedence == EGG_DIST: # if it's an .egg, give it precedence over its directory if path is sys.path: self.check_version_conflict() @@ -2364,20 +2673,22 @@ class Distribution(object): return # p is the spot where we found or inserted loc; now remove duplicates - while 1: + while True: try: np = npath.index(nloc, p+1) except ValueError: break else: del npath[np], path[np] - p = np # ha! + # ha! 
+ p = np return def check_version_conflict(self): - if self.key=='setuptools': - return # ignore the inevitable setuptools self-conflicts :( + if self.key == 'setuptools': + # ignore the inevitable setuptools self-conflicts :( + return nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt')) loc = normalize_path(self.location) @@ -2400,17 +2711,15 @@ class Distribution(object): try: self.version except ValueError: - issue_warning("Unbuilt egg for "+repr(self)) + issue_warning("Unbuilt egg for " + repr(self)) return False return True def clone(self,**kw): """Copy this distribution, substituting in any changed keyword args""" - for attr in ( - 'project_name', 'version', 'py_version', 'platform', 'location', - 'precedence' - ): - kw.setdefault(attr, getattr(self,attr,None)) + names = 'project_name version py_version platform location precedence' + for attr in names.split(): + kw.setdefault(attr, getattr(self, attr, None)) kw.setdefault('metadata', self._provider) return self.__class__(**kw) @@ -2430,8 +2739,8 @@ class DistInfoDistribution(Distribution): try: return self._pkg_info except AttributeError: - from email.parser import Parser - self._pkg_info = Parser().parsestr(self.get_metadata(self.PKG_INFO)) + metadata = self.get_metadata(self.PKG_INFO) + self._pkg_info = email.parser.Parser().parsestr(metadata) return self._pkg_info @property @@ -2499,25 +2808,23 @@ def issue_warning(*args,**kw): level += 1 except ValueError: pass - from warnings import warn - warn(stacklevel = level+1, *args, **kw) + warnings.warn(stacklevel=level + 1, *args, **kw) def parse_requirements(strs): """Yield ``Requirement`` objects for each specification in `strs` - `strs` must be an instance of ``basestring``, or a (possibly-nested) - iterable thereof. + `strs` must be a string, or a (possibly-nested) iterable thereof. 
""" # create a steppable iterator, so we can handle \-continuations lines = iter(yield_lines(strs)) - def scan_list(ITEM,TERMINATOR,line,p,groups,item_name): + def scan_list(ITEM, TERMINATOR, line, p, groups, item_name): items = [] - while not TERMINATOR(line,p): - if CONTINUE(line,p): + while not TERMINATOR(line, p): + if CONTINUE(line, p): try: line = next(lines) p = 0 @@ -2526,23 +2833,26 @@ def parse_requirements(strs): "\\ must not appear on the last nonblank line" ) - match = ITEM(line,p) + match = ITEM(line, p) if not match: - raise ValueError("Expected "+item_name+" in",line,"at",line[p:]) + msg = "Expected " + item_name + " in" + raise ValueError(msg, line, "at", line[p:]) items.append(match.group(*groups)) p = match.end() - match = COMMA(line,p) + match = COMMA(line, p) if match: - p = match.end() # skip the comma - elif not TERMINATOR(line,p): - raise ValueError( - "Expected ',' or end-of-list in",line,"at",line[p:] - ) + # skip the comma + p = match.end() + elif not TERMINATOR(line, p): + msg = "Expected ',' or end-of-list in" + raise ValueError(msg, line, "at", line[p:]) - match = TERMINATOR(line,p) - if match: p = match.end() # skip the terminator, if any + match = TERMINATOR(line, p) + # skip the terminator, if any + if match: + p = match.end() return line, p, items for line in lines: @@ -2553,67 +2863,59 @@ def parse_requirements(strs): p = match.end() extras = [] - match = OBRACKET(line,p) + match = OBRACKET(line, p) if match: p = match.end() line, p, extras = scan_list( DISTRO, CBRACKET, line, p, (1,), "'extra' name" ) - line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec") - specs = [(op,safe_version(val)) for op,val in specs] + line, p, specs = scan_list(VERSION, LINE_END, line, p, (1, 2), + "version spec") + specs = [(op, val) for op, val in specs] yield Requirement(project_name, specs, extras) -def _sort_dists(dists): - tmp = [(dist.hashcmp,dist) for dist in dists] - tmp.sort() - dists[::-1] = [d for hc,d in tmp] - - class Requirement: def __init__(self, project_name, specs, extras): """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" self.unsafe_name, project_name = project_name, safe_name(project_name) self.project_name, self.key = project_name, project_name.lower() - index = [(parse_version(v),state_machine[op],op,v) for op,v in specs] - index.sort() - self.specs = [(op,ver) for parsed,trans,op,ver in index] - self.index, self.extras = index, tuple(map(safe_extra,extras)) + self.specifier = packaging.specifiers.SpecifierSet( + ",".join(["".join([x, y]) for x, y in specs]) + ) + self.specs = specs + self.extras = tuple(map(safe_extra, extras)) self.hashCmp = ( - self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]), - frozenset(self.extras) + self.key, + self.specifier, + frozenset(self.extras), ) self.__hash = hash(self.hashCmp) def __str__(self): - specs = ','.join([''.join(s) for s in self.specs]) extras = ','.join(self.extras) - if extras: extras = '[%s]' % extras - return '%s%s%s' % (self.project_name, extras, specs) + if extras: + extras = '[%s]' % extras + return '%s%s%s' % (self.project_name, extras, self.specifier) - def __eq__(self,other): - return isinstance(other,Requirement) and self.hashCmp==other.hashCmp + def __eq__(self, other): + return ( + isinstance(other, Requirement) and + self.hashCmp == other.hashCmp + ) - def __contains__(self,item): - if isinstance(item,Distribution): - if item.key != self.key: return False - if self.index: item = item.parsed_version # only get if we need it - elif 
isinstance(item,basestring): - item = parse_version(item) - last = None - compare = lambda a, b: (a > b) - (a < b) # -1, 0, 1 - for parsed,trans,op,ver in self.index: - action = trans[compare(item,parsed)] # Indexing: 0, 1, -1 - if action=='F': + def __contains__(self, item): + if isinstance(item, Distribution): + if item.key != self.key: return False - elif action=='T': - return True - elif action=='+': - last = True - elif action=='-' or last is None: last = False - if last is None: last = True # no rules encountered - return last + + item = item.version + + # Allow prereleases always in order to match the previous behavior of + # this method. In the future this should be smarter and follow PEP 440 + # more accurately. + return self.specifier.contains(item, prereleases=True) def __hash__(self): return self.__hash @@ -2624,26 +2926,16 @@ class Requirement: def parse(s): reqs = list(parse_requirements(s)) if reqs: - if len(reqs)==1: + if len(reqs) == 1: return reqs[0] raise ValueError("Expected only one requirement", s) raise ValueError("No requirements found", s) -state_machine = { - # =>< - '<': '--T', - '<=': 'T-T', - '>': 'F+F', - '>=': 'T+F', - '==': 'T..', - '!=': 'F++', -} - def _get_mro(cls): """Get an mro for a type or classic class""" - if not isinstance(cls,type): - class cls(cls,object): pass + if not isinstance(cls, type): + class cls(cls, object): pass return cls.__mro__[1:] return cls.__mro__ @@ -2660,8 +2952,19 @@ def ensure_directory(path): if not os.path.isdir(dirname): os.makedirs(dirname) + +def _bypass_ensure_directory(path, mode=0o777): + """Sandbox-bypassing version of ensure_directory()""" + if not WRITE_SUPPORT: + raise IOError('"os.mkdir" not supported on this platform.') + dirname, filename = split(path) + if dirname and filename and not isdir(dirname): + _bypass_ensure_directory(dirname) + mkdir(dirname, mode) + + def split_sections(s): - """Split a string or iterable thereof into (section,content) pairs + """Split a string or iterable thereof into (section, content) pairs Each ``section`` is a stripped version of the section header ("[section]") and each ``content`` is a list of stripped lines excluding blank lines and @@ -2686,13 +2989,21 @@ def split_sections(s): yield section, content def _mkstemp(*args,**kw): - from tempfile import mkstemp old_open = os.open try: - os.open = os_open # temporarily bypass sandboxing - return mkstemp(*args,**kw) + # temporarily bypass sandboxing + os.open = os_open + return tempfile.mkstemp(*args,**kw) finally: - os.open = old_open # and then put it back + # and then put it back + os.open = old_open + + +# Silence the PEP440Warning by default, so that end users don't get hit by it +# randomly just because they use pkg_resources. We want to append the rule +# because we want earlier uses of filterwarnings to take precedence over this +# one. +warnings.filterwarnings("ignore", category=PEP440Warning, append=True) # Set up global resource manager (deliberately not state-saved) @@ -2704,35 +3015,19 @@ def _initialize(g): _initialize(globals()) # Prepare the master working set and make the ``require()`` API available -_declare_state('object', working_set = WorkingSet()) -try: - # Does the main program list any requirements? 
- from __main__ import __requires__ -except ImportError: - pass # No: just use the default working set based on sys.path -else: - # Yes: ensure the requirements are met, by prefixing sys.path if necessary - try: - working_set.require(__requires__) - except VersionConflict: # try it without defaults already on sys.path - working_set = WorkingSet([]) # by starting with an empty path - for dist in working_set.resolve( - parse_requirements(__requires__), Environment() - ): - working_set.add(dist) - for entry in sys.path: # add any missing entries from sys.path - if entry not in working_set.entries: - working_set.add_entry(entry) - sys.path[:] = working_set.entries # then copy back to sys.path +working_set = WorkingSet._build_master() +_declare_state('object', working_set=working_set) require = working_set.require iter_entry_points = working_set.iter_entry_points add_activation_listener = working_set.subscribe run_script = working_set.run_script -run_main = run_script # backward compatibility +# backward compatibility +run_main = run_script # Activate all distributions already on sys.path, and ensure that # all distributions added to the working set in the future (e.g. by # calling ``require()``) will get activated as well. add_activation_listener(lambda dist: dist.activate()) working_set.entries=[] -list(map(working_set.add_entry,sys.path)) # match order +# match order +list(map(working_set.add_entry, sys.path)) diff --git a/awx/lib/site-packages/pkg_resources/_vendor/__init__.py b/awx/lib/site-packages/pkg_resources/_vendor/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/pkg_resources/_vendor/packaging/__about__.py b/awx/lib/site-packages/pkg_resources/_vendor/packaging/__about__.py new file mode 100644 index 0000000000..36f1a35c85 --- /dev/null +++ b/awx/lib/site-packages/pkg_resources/_vendor/packaging/__about__.py @@ -0,0 +1,31 @@ +# Copyright 2014 Donald Stufft +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import absolute_import, division, print_function + +__all__ = [ + "__title__", "__summary__", "__uri__", "__version__", "__author__", + "__email__", "__license__", "__copyright__", +] + +__title__ = "packaging" +__summary__ = "Core utilities for Python packages" +__uri__ = "https://github.com/pypa/packaging" + +__version__ = "15.0" + +__author__ = "Donald Stufft" +__email__ = "donald@stufft.io" + +__license__ = "Apache License, Version 2.0" +__copyright__ = "Copyright 2014 %s" % __author__ diff --git a/awx/lib/site-packages/pkg_resources/_vendor/packaging/__init__.py b/awx/lib/site-packages/pkg_resources/_vendor/packaging/__init__.py new file mode 100644 index 0000000000..c39a8eab8e --- /dev/null +++ b/awx/lib/site-packages/pkg_resources/_vendor/packaging/__init__.py @@ -0,0 +1,24 @@ +# Copyright 2014 Donald Stufft +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import absolute_import, division, print_function + +from .__about__ import ( + __author__, __copyright__, __email__, __license__, __summary__, __title__, + __uri__, __version__ +) + +__all__ = [ + "__title__", "__summary__", "__uri__", "__version__", "__author__", + "__email__", "__license__", "__copyright__", +] diff --git a/awx/lib/site-packages/pkg_resources/_vendor/packaging/_compat.py b/awx/lib/site-packages/pkg_resources/_vendor/packaging/_compat.py new file mode 100644 index 0000000000..5c396ceac6 --- /dev/null +++ b/awx/lib/site-packages/pkg_resources/_vendor/packaging/_compat.py @@ -0,0 +1,40 @@ +# Copyright 2014 Donald Stufft +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import absolute_import, division, print_function + +import sys + + +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 + +# flake8: noqa + +if PY3: + string_types = str, +else: + string_types = basestring, + + +def with_metaclass(meta, *bases): + """ + Create a base class with a metaclass. + """ + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(meta): + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) diff --git a/awx/lib/site-packages/pkg_resources/_vendor/packaging/_structures.py b/awx/lib/site-packages/pkg_resources/_vendor/packaging/_structures.py new file mode 100644 index 0000000000..0ae9bb52a2 --- /dev/null +++ b/awx/lib/site-packages/pkg_resources/_vendor/packaging/_structures.py @@ -0,0 +1,78 @@ +# Copyright 2014 Donald Stufft +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from __future__ import absolute_import, division, print_function + + +class Infinity(object): + + def __repr__(self): + return "Infinity" + + def __hash__(self): + return hash(repr(self)) + + def __lt__(self, other): + return False + + def __le__(self, other): + return False + + def __eq__(self, other): + return isinstance(other, self.__class__) + + def __ne__(self, other): + return not isinstance(other, self.__class__) + + def __gt__(self, other): + return True + + def __ge__(self, other): + return True + + def __neg__(self): + return NegativeInfinity + +Infinity = Infinity() + + +class NegativeInfinity(object): + + def __repr__(self): + return "-Infinity" + + def __hash__(self): + return hash(repr(self)) + + def __lt__(self, other): + return True + + def __le__(self, other): + return True + + def __eq__(self, other): + return isinstance(other, self.__class__) + + def __ne__(self, other): + return not isinstance(other, self.__class__) + + def __gt__(self, other): + return False + + def __ge__(self, other): + return False + + def __neg__(self): + return Infinity + +NegativeInfinity = NegativeInfinity() diff --git a/awx/lib/site-packages/pkg_resources/_vendor/packaging/specifiers.py b/awx/lib/site-packages/pkg_resources/_vendor/packaging/specifiers.py new file mode 100644 index 0000000000..9ad0a635ed --- /dev/null +++ b/awx/lib/site-packages/pkg_resources/_vendor/packaging/specifiers.py @@ -0,0 +1,772 @@ +# Copyright 2014 Donald Stufft +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import absolute_import, division, print_function + +import abc +import functools +import itertools +import re + +from ._compat import string_types, with_metaclass +from .version import Version, LegacyVersion, parse + + +class InvalidSpecifier(ValueError): + """ + An invalid specifier was found, users should refer to PEP 440. + """ + + +class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): + + @abc.abstractmethod + def __str__(self): + """ + Returns the str representation of this Specifier like object. This + should be representative of the Specifier itself. + """ + + @abc.abstractmethod + def __hash__(self): + """ + Returns a hash value for this Specifier like object. + """ + + @abc.abstractmethod + def __eq__(self, other): + """ + Returns a boolean representing whether or not the two Specifier like + objects are equal. + """ + + @abc.abstractmethod + def __ne__(self, other): + """ + Returns a boolean representing whether or not the two Specifier like + objects are not equal. + """ + + @abc.abstractproperty + def prereleases(self): + """ + Returns whether or not pre-releases as a whole are allowed by this + specifier. + """ + + @prereleases.setter + def prereleases(self, value): + """ + Sets whether or not pre-releases as a whole are allowed by this + specifier. + """ + + @abc.abstractmethod + def contains(self, item, prereleases=None): + """ + Determines if the given item is contained within this specifier. 
+ """ + + @abc.abstractmethod + def filter(self, iterable, prereleases=None): + """ + Takes an iterable of items and filters them so that only items which + are contained within this specifier are allowed in it. + """ + + +class _IndividualSpecifier(BaseSpecifier): + + _operators = {} + + def __init__(self, spec="", prereleases=None): + match = self._regex.search(spec) + if not match: + raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) + + self._spec = ( + match.group("operator").strip(), + match.group("version").strip(), + ) + + # Store whether or not this Specifier should accept prereleases + self._prereleases = prereleases + + def __repr__(self): + pre = ( + ", prereleases={0!r}".format(self.prereleases) + if self._prereleases is not None + else "" + ) + + return "<{0}({1!r}{2})>".format( + self.__class__.__name__, + str(self), + pre, + ) + + def __str__(self): + return "{0}{1}".format(*self._spec) + + def __hash__(self): + return hash(self._spec) + + def __eq__(self, other): + if isinstance(other, string_types): + try: + other = self.__class__(other) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._spec == other._spec + + def __ne__(self, other): + if isinstance(other, string_types): + try: + other = self.__class__(other) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._spec != other._spec + + def _get_operator(self, op): + return getattr(self, "_compare_{0}".format(self._operators[op])) + + def _coerce_version(self, version): + if not isinstance(version, (LegacyVersion, Version)): + version = parse(version) + return version + + @property + def prereleases(self): + return self._prereleases + + @prereleases.setter + def prereleases(self, value): + self._prereleases = value + + def contains(self, item, prereleases=None): + # Determine if prereleases are to be allowed or not. + if prereleases is None: + prereleases = self.prereleases + + # Normalize item to a Version or LegacyVersion, this allows us to have + # a shortcut for ``"2.0" in Specifier(">=2") + item = self._coerce_version(item) + + # Determine if we should be supporting prereleases in this specifier + # or not, if we do not support prereleases than we can short circuit + # logic if this version is a prereleases. + if item.is_prerelease and not prereleases: + return False + + # Actually do the comparison to determine if this item is contained + # within this Specifier or not. + return self._get_operator(self._spec[0])(item, self._spec[1]) + + def filter(self, iterable, prereleases=None): + yielded = False + found_prereleases = [] + + kw = {"prereleases": prereleases if prereleases is not None else True} + + # Attempt to iterate over all the values in the iterable and if any of + # them match, yield them. + for version in iterable: + parsed_version = self._coerce_version(version) + + if self.contains(parsed_version, **kw): + # If our version is a prerelease, and we were not set to allow + # prereleases, then we'll store it for later incase nothing + # else matches this specifier. + if (parsed_version.is_prerelease + and not (prereleases or self.prereleases)): + found_prereleases.append(version) + # Either this is not a prerelease, or we should have been + # accepting prereleases from the begining. 
+ else: + yielded = True + yield version + + # Now that we've iterated over everything, determine if we've yielded + # any values, and if we have not and we have any prereleases stored up + # then we will go ahead and yield the prereleases. + if not yielded and found_prereleases: + for version in found_prereleases: + yield version + + +class LegacySpecifier(_IndividualSpecifier): + + _regex = re.compile( + r""" + ^ + \s* + (?P(==|!=|<=|>=|<|>)) + \s* + (?P + [^\s]* # We just match everything, except for whitespace since this + # is a "legacy" specifier and the version string can be just + # about anything. + ) + \s* + $ + """, + re.VERBOSE | re.IGNORECASE, + ) + + _operators = { + "==": "equal", + "!=": "not_equal", + "<=": "less_than_equal", + ">=": "greater_than_equal", + "<": "less_than", + ">": "greater_than", + } + + def _coerce_version(self, version): + if not isinstance(version, LegacyVersion): + version = LegacyVersion(str(version)) + return version + + def _compare_equal(self, prospective, spec): + return prospective == self._coerce_version(spec) + + def _compare_not_equal(self, prospective, spec): + return prospective != self._coerce_version(spec) + + def _compare_less_than_equal(self, prospective, spec): + return prospective <= self._coerce_version(spec) + + def _compare_greater_than_equal(self, prospective, spec): + return prospective >= self._coerce_version(spec) + + def _compare_less_than(self, prospective, spec): + return prospective < self._coerce_version(spec) + + def _compare_greater_than(self, prospective, spec): + return prospective > self._coerce_version(spec) + + +def _require_version_compare(fn): + @functools.wraps(fn) + def wrapped(self, prospective, spec): + if not isinstance(prospective, Version): + return False + return fn(self, prospective, spec) + return wrapped + + +class Specifier(_IndividualSpecifier): + + _regex = re.compile( + r""" + ^ + \s* + (?P(~=|==|!=|<=|>=|<|>|===)) + (?P + (?: + # The identity operators allow for an escape hatch that will + # do an exact string match of the version you wish to install. + # This will not be parsed by PEP 440 and we cannot determine + # any semantic meaning from it. This operator is discouraged + # but included entirely as an escape hatch. + (?<====) # Only match for the identity operator + \s* + [^\s]* # We just match everything, except for whitespace + # since we are only testing for strict identity. + ) + | + (?: + # The (non)equality operators allow for wild card and local + # versions to be specified so we have to define these two + # operators separately to enable that. + (?<===|!=) # Only match for equals and not equals + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)* # release + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + + # You cannot use a wild card and a dev or local version + # together so group them with a | and make them optional. + (?: + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local + | + \.\* # Wild card syntax of .* + )? + ) + | + (?: + # The compatible operator requires at least two digits in the + # release segment. + (?<=~=) # Only match for the compatible operator + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? 
+ (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + ) + | + (?: + # All other operators only allow a sub set of what the + # (non)equality operators do. Specifically they do not allow + # local versions to be specified nor do they allow the prefix + # matching wild cards. + (?=": "greater_than_equal", + "<": "less_than", + ">": "greater_than", + "===": "arbitrary", + } + + @_require_version_compare + def _compare_compatible(self, prospective, spec): + # Compatible releases have an equivalent combination of >= and ==. That + # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to + # implement this in terms of the other specifiers instead of + # implementing it ourselves. The only thing we need to do is construct + # the other specifiers. + + # We want everything but the last item in the version, but we want to + # ignore post and dev releases and we want to treat the pre-release as + # it's own separate segment. + prefix = ".".join( + list( + itertools.takewhile( + lambda x: (not x.startswith("post") + and not x.startswith("dev")), + _version_split(spec), + ) + )[:-1] + ) + + # Add the prefix notation to the end of our string + prefix += ".*" + + return (self._get_operator(">=")(prospective, spec) + and self._get_operator("==")(prospective, prefix)) + + @_require_version_compare + def _compare_equal(self, prospective, spec): + # We need special logic to handle prefix matching + if spec.endswith(".*"): + # Split the spec out by dots, and pretend that there is an implicit + # dot in between a release segment and a pre-release segment. + spec = _version_split(spec[:-2]) # Remove the trailing .* + + # Split the prospective version out by dots, and pretend that there + # is an implicit dot in between a release segment and a pre-release + # segment. + prospective = _version_split(str(prospective)) + + # Shorten the prospective version to be the same length as the spec + # so that we can determine if the specifier is a prefix of the + # prospective version or not. + prospective = prospective[:len(spec)] + + # Pad out our two sides with zeros so that they both equal the same + # length. + spec, prospective = _pad_version(spec, prospective) + else: + # Convert our spec string into a Version + spec = Version(spec) + + # If the specifier does not have a local segment, then we want to + # act as if the prospective version also does not have a local + # segment. + if not spec.local: + prospective = Version(prospective.public) + + return prospective == spec + + @_require_version_compare + def _compare_not_equal(self, prospective, spec): + return not self._compare_equal(prospective, spec) + + @_require_version_compare + def _compare_less_than_equal(self, prospective, spec): + return prospective <= Version(spec) + + @_require_version_compare + def _compare_greater_than_equal(self, prospective, spec): + return prospective >= Version(spec) + + @_require_version_compare + def _compare_less_than(self, prospective, spec): + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec) + + # Check to see if the prospective version is less than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective < spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a pre-release version, that we do not accept pre-release + # versions for the version mentioned in the specifier (e.g. 
<3.1 should + # not match 3.1.dev0, but should match 3.0.dev0). + if not spec.is_prerelease and prospective.is_prerelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that prospective version is both + # less than the spec version *and* it's not a pre-release of the same + # version in the spec. + return True + + @_require_version_compare + def _compare_greater_than(self, prospective, spec): + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec) + + # Check to see if the prospective version is greater than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective > spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a post-release version, that we do not accept + # post-release versions for the version mentioned in the specifier + # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). + if not spec.is_postrelease and prospective.is_postrelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # Ensure that we do not allow a local version of the version mentioned + # in the specifier, which is techincally greater than, to match. + if prospective.local is not None: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that prospective version is both + # greater than the spec version *and* it's not a pre-release of the + # same version in the spec. + return True + + def _compare_arbitrary(self, prospective, spec): + return str(prospective).lower() == str(spec).lower() + + @property + def prereleases(self): + # If there is an explicit prereleases set for this, then we'll just + # blindly use that. + if self._prereleases is not None: + return self._prereleases + + # Look at all of our specifiers and determine if they are inclusive + # operators, and if they are if they are including an explicit + # prerelease. + operator, version = self._spec + if operator in ["==", ">=", "<=", "~="]: + # The == specifier can include a trailing .*, if it does we + # want to remove before parsing. + if operator == "==" and version.endswith(".*"): + version = version[:-2] + + # Parse the version, and if it is a pre-release than this + # specifier allows pre-releases. 
+ if parse(version).is_prerelease: + return True + + return False + + @prereleases.setter + def prereleases(self, value): + self._prereleases = value + + +_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") + + +def _version_split(version): + result = [] + for item in version.split("."): + match = _prefix_regex.search(item) + if match: + result.extend(match.groups()) + else: + result.append(item) + return result + + +def _pad_version(left, right): + left_split, right_split = [], [] + + # Get the release segment of our versions + left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) + right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) + + # Get the rest of our versions + left_split.append(left[len(left_split):]) + right_split.append(left[len(right_split):]) + + # Insert our padding + left_split.insert( + 1, + ["0"] * max(0, len(right_split[0]) - len(left_split[0])), + ) + right_split.insert( + 1, + ["0"] * max(0, len(left_split[0]) - len(right_split[0])), + ) + + return ( + list(itertools.chain(*left_split)), + list(itertools.chain(*right_split)), + ) + + +class SpecifierSet(BaseSpecifier): + + def __init__(self, specifiers="", prereleases=None): + # Split on , to break each indidivual specifier into it's own item, and + # strip each item to remove leading/trailing whitespace. + specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] + + # Parsed each individual specifier, attempting first to make it a + # Specifier and falling back to a LegacySpecifier. + parsed = set() + for specifier in specifiers: + try: + parsed.add(Specifier(specifier)) + except InvalidSpecifier: + parsed.add(LegacySpecifier(specifier)) + + # Turn our parsed specifiers into a frozen set and save them for later. + self._specs = frozenset(parsed) + + # Store our prereleases value so we can use it later to determine if + # we accept prereleases or not. + self._prereleases = prereleases + + def __repr__(self): + pre = ( + ", prereleases={0!r}".format(self.prereleases) + if self._prereleases is not None + else "" + ) + + return "".format(str(self), pre) + + def __str__(self): + return ",".join(sorted(str(s) for s in self._specs)) + + def __hash__(self): + return hash(self._specs) + + def __and__(self, other): + if isinstance(other, string_types): + other = SpecifierSet(other) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + specifier = SpecifierSet() + specifier._specs = frozenset(self._specs | other._specs) + + if self._prereleases is None and other._prereleases is not None: + specifier._prereleases = other._prereleases + elif self._prereleases is not None and other._prereleases is None: + specifier._prereleases = self._prereleases + elif self._prereleases == other._prereleases: + specifier._prereleases = self._prereleases + else: + raise ValueError( + "Cannot combine SpecifierSets with True and False prerelease " + "overrides." 
+ ) + + return specifier + + def __eq__(self, other): + if isinstance(other, string_types): + other = SpecifierSet(other) + elif isinstance(other, _IndividualSpecifier): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs == other._specs + + def __ne__(self, other): + if isinstance(other, string_types): + other = SpecifierSet(other) + elif isinstance(other, _IndividualSpecifier): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs != other._specs + + @property + def prereleases(self): + # If we have been given an explicit prerelease modifier, then we'll + # pass that through here. + if self._prereleases is not None: + return self._prereleases + + # Otherwise we'll see if any of the given specifiers accept + # prereleases, if any of them do we'll return True, otherwise False. + # Note: The use of any() here means that an empty set of specifiers + # will always return False, this is an explicit design decision. + return any(s.prereleases for s in self._specs) + + @prereleases.setter + def prereleases(self, value): + self._prereleases = value + + def contains(self, item, prereleases=None): + # Ensure that our item is a Version or LegacyVersion instance. + if not isinstance(item, (LegacyVersion, Version)): + item = parse(item) + + # We can determine if we're going to allow pre-releases by looking to + # see if any of the underlying items supports them. If none of them do + # and this item is a pre-release then we do not allow it and we can + # short circuit that here. + # Note: This means that 1.0.dev1 would not be contained in something + # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0 + if (not (self.prereleases or prereleases)) and item.is_prerelease: + return False + + # Determine if we're forcing a prerelease or not, we bypass + # self.prereleases here and use self._prereleases because we want to + # only take into consideration actual *forced* values. The underlying + # specifiers will handle the other logic. + # The logic here is: If prereleases is anything but None, we'll just + # go aheand and continue to use that. However if + # prereleases is None, then we'll use whatever the + # value of self._prereleases is as long as it is not + # None itself. + if prereleases is None and self._prereleases is not None: + prereleases = self._prereleases + + # We simply dispatch to the underlying specs here to make sure that the + # given version is contained within all of them. + # Note: This use of all() here means that an empty set of specifiers + # will always return True, this is an explicit design decision. + return all( + s.contains(item, prereleases=prereleases) + for s in self._specs + ) + + def filter(self, iterable, prereleases=None): + # Determine if we're forcing a prerelease or not, we bypass + # self.prereleases here and use self._prereleases because we want to + # only take into consideration actual *forced* values. The underlying + # specifiers will handle the other logic. + # The logic here is: If prereleases is anything but None, we'll just + # go aheand and continue to use that. However if + # prereleases is None, then we'll use whatever the + # value of self._prereleases is as long as it is not + # None itself. 
+ if prereleases is None and self._prereleases is not None: + prereleases = self._prereleases + + # If we have any specifiers, then we want to wrap our iterable in the + # filter method for each one, this will act as a logical AND amongst + # each specifier. + if self._specs: + for spec in self._specs: + iterable = spec.filter(iterable, prereleases=prereleases) + return iterable + # If we do not have any specifiers, then we need to have a rough filter + # which will filter out any pre-releases, unless there are no final + # releases, and which will filter out LegacyVersion in general. + else: + filtered = [] + found_prereleases = [] + + for item in iterable: + # Ensure that we some kind of Version class for this item. + if not isinstance(item, (LegacyVersion, Version)): + parsed_version = parse(item) + else: + parsed_version = item + + # Filter out any item which is parsed as a LegacyVersion + if isinstance(parsed_version, LegacyVersion): + continue + + # Store any item which is a pre-release for later unless we've + # already found a final version or we are accepting prereleases + if parsed_version.is_prerelease and not prereleases: + if not filtered: + found_prereleases.append(item) + else: + filtered.append(item) + + # If we've found no items except for pre-releases, then we'll go + # ahead and use the pre-releases + if not filtered and found_prereleases and prereleases is None: + return found_prereleases + + return filtered diff --git a/awx/lib/site-packages/pkg_resources/_vendor/packaging/version.py b/awx/lib/site-packages/pkg_resources/_vendor/packaging/version.py new file mode 100644 index 0000000000..cf8afb16d6 --- /dev/null +++ b/awx/lib/site-packages/pkg_resources/_vendor/packaging/version.py @@ -0,0 +1,401 @@ +# Copyright 2014 Donald Stufft +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import absolute_import, division, print_function + +import collections +import itertools +import re + +from ._structures import Infinity + + +__all__ = [ + "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN" +] + + +_Version = collections.namedtuple( + "_Version", + ["epoch", "release", "dev", "pre", "post", "local"], +) + + +def parse(version): + """ + Parse the given version string and return either a :class:`Version` object + or a :class:`LegacyVersion` object depending on if the given version is + a valid PEP 440 version or a legacy version. + """ + try: + return Version(version) + except InvalidVersion: + return LegacyVersion(version) + + +class InvalidVersion(ValueError): + """ + An invalid version was found, users should refer to PEP 440. 
+ """ + + +class _BaseVersion(object): + + def __hash__(self): + return hash(self._key) + + def __lt__(self, other): + return self._compare(other, lambda s, o: s < o) + + def __le__(self, other): + return self._compare(other, lambda s, o: s <= o) + + def __eq__(self, other): + return self._compare(other, lambda s, o: s == o) + + def __ge__(self, other): + return self._compare(other, lambda s, o: s >= o) + + def __gt__(self, other): + return self._compare(other, lambda s, o: s > o) + + def __ne__(self, other): + return self._compare(other, lambda s, o: s != o) + + def _compare(self, other, method): + if not isinstance(other, _BaseVersion): + return NotImplemented + + return method(self._key, other._key) + + +class LegacyVersion(_BaseVersion): + + def __init__(self, version): + self._version = str(version) + self._key = _legacy_cmpkey(self._version) + + def __str__(self): + return self._version + + def __repr__(self): + return "".format(repr(str(self))) + + @property + def public(self): + return self._version + + @property + def base_version(self): + return self._version + + @property + def local(self): + return None + + @property + def is_prerelease(self): + return False + + @property + def is_postrelease(self): + return False + + +_legacy_version_component_re = re.compile( + r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE, +) + +_legacy_version_replacement_map = { + "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@", +} + + +def _parse_version_parts(s): + for part in _legacy_version_component_re.split(s): + part = _legacy_version_replacement_map.get(part, part) + + if not part or part == ".": + continue + + if part[:1] in "0123456789": + # pad for numeric comparison + yield part.zfill(8) + else: + yield "*" + part + + # ensure that alpha/beta/candidate are before final + yield "*final" + + +def _legacy_cmpkey(version): + # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch + # greater than or equal to 0. This will effectively put the LegacyVersion, + # which uses the defacto standard originally implemented by setuptools, + # as before all PEP 440 versions. + epoch = -1 + + # This scheme is taken from pkg_resources.parse_version setuptools prior to + # it's adoption of the packaging library. + parts = [] + for part in _parse_version_parts(version.lower()): + if part.startswith("*"): + # remove "-" before a prerelease tag + if part < "*final": + while parts and parts[-1] == "*final-": + parts.pop() + + # remove trailing zeros from each series of numeric parts + while parts and parts[-1] == "00000000": + parts.pop() + + parts.append(part) + parts = tuple(parts) + + return epoch, parts + +# Deliberately not anchored to the start and end of the string, to make it +# easier for 3rd party code to reuse +VERSION_PATTERN = r""" + v? + (?: + (?:(?P[0-9]+)!)? # epoch + (?P[0-9]+(?:\.[0-9]+)*) # release segment + (?P
<pre>                                          # pre-release
+            [-_\.]?
+            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+            [-_\.]?
+            (?P<pre_n>[0-9]+)?
+        )?
+        (?P<post>                                         # post release
+            (?:-(?P<post_n1>[0-9]+))
+            |
+            (?:
+                [-_\.]?
+                (?P<post_l>post|rev|r)
+                [-_\.]?
+                (?P<post_n2>[0-9]+)?
+            )
+        )?
+        (?P<dev>                                          # dev release
+            [-_\.]?
+            (?P<dev_l>dev)
+            [-_\.]?
+            (?P<dev_n>[0-9]+)?
+        )?
+    )
+    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+"""
+
+
+class Version(_BaseVersion):
+
+    _regex = re.compile(
+        r"^\s*" + VERSION_PATTERN + r"\s*$",
+        re.VERBOSE | re.IGNORECASE,
+    )
+
+    def __init__(self, version):
+        # Validate the version and parse it into pieces
+        match = self._regex.search(version)
+        if not match:
+            raise InvalidVersion("Invalid version: '{0}'".format(version))
+
+        # Store the parsed out pieces of the version
+        self._version = _Version(
+            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+            release=tuple(int(i) for i in match.group("release").split(".")),
+            pre=_parse_letter_version(
+                match.group("pre_l"),
+                match.group("pre_n"),
+            ),
+            post=_parse_letter_version(
+                match.group("post_l"),
+                match.group("post_n1") or match.group("post_n2"),
+            ),
+            dev=_parse_letter_version(
+                match.group("dev_l"),
+                match.group("dev_n"),
+            ),
+            local=_parse_local_version(match.group("local")),
+        )
+
+        # Generate a key which will be used for sorting
+        self._key = _cmpkey(
+            self._version.epoch,
+            self._version.release,
+            self._version.pre,
+            self._version.post,
+            self._version.dev,
+            self._version.local,
+        )
+
+    def __repr__(self):
+        return "".format(repr(str(self)))
+
+    def __str__(self):
+        parts = []
+
+        # Epoch
+        if self._version.epoch != 0:
+            parts.append("{0}!".format(self._version.epoch))
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self._version.release))
+
+        # Pre-release
+        if self._version.pre is not None:
+            parts.append("".join(str(x) for x in self._version.pre))
+
+        # Post-release
+        if self._version.post is not None:
+            parts.append(".post{0}".format(self._version.post[1]))
+
+        # Development release
+        if self._version.dev is not None:
+            parts.append(".dev{0}".format(self._version.dev[1]))
+
+        # Local version segment
+        if self._version.local is not None:
+            parts.append(
+                "+{0}".format(".".join(str(x) for x in self._version.local))
+            )
+
+        return "".join(parts)
+
+    @property
+    def public(self):
+        return str(self).split("+", 1)[0]
+
+    @property
+    def base_version(self):
+        parts = []
+
+        # Epoch
+        if self._version.epoch != 0:
+            parts.append("{0}!".format(self._version.epoch))
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self._version.release))
+
+        return "".join(parts)
+
+    @property
+    def local(self):
+        version_string = str(self)
+        if "+" in version_string:
+            return version_string.split("+", 1)[1]
+
+    @property
+    def is_prerelease(self):
+        return bool(self._version.dev or self._version.pre)
+
+    @property
+    def is_postrelease(self):
+        return bool(self._version.post)
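
Editor's note: a quick hedged illustration of what the Version class above exposes once a string parses cleanly (the example version is arbitrary):

    from pkg_resources._vendor.packaging.version import Version, InvalidVersion

    v = Version("1.0.dev456+abc.2")
    assert str(v) == "1.0.dev456+abc.2"      # normalized form from __str__
    assert v.public == "1.0.dev456"          # everything before the local segment
    assert v.base_version == "1.0"           # epoch + release only
    assert v.local == "abc.2"
    assert v.is_prerelease and not v.is_postrelease

    try:
        Version("not a version")
    except InvalidVersion:
        pass  # parse() falls back to LegacyVersion in this case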
+
+
+def _parse_letter_version(letter, number):
+    if letter:
+        # We consider there to be an implicit 0 in a pre-release if there is
+        # not a numeral associated with it.
+        if number is None:
+            number = 0
+
+        # We normalize any letters to their lower case form
+        letter = letter.lower()
+
+        # We consider some words to be alternate spellings of other words and
+        # in those cases we want to normalize the spellings to our preferred
+        # spelling.
+        if letter == "alpha":
+            letter = "a"
+        elif letter == "beta":
+            letter = "b"
+        elif letter in ["c", "pre", "preview"]:
+            letter = "rc"
+
+        return letter, int(number)
+    if not letter and number:
+        # We assume if we are given a number, but we are not given a letter
+        # then this is using the implicit post release syntax (e.g. 1.0-1)
+        letter = "post"
+
+        return letter, int(number)
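
Editor's note: the helper above normalizes pre/post spellings; a few illustrative calls, not part of the patch:

    # Alternate spellings fold to the canonical letter, a missing numeral
    # becomes 0, and a bare number means an implicit post release.
    assert _parse_letter_version("alpha", None) == ("a", 0)
    assert _parse_letter_version("PRE", "2") == ("rc", 2)
    assert _parse_letter_version(None, "1") == ("post", 1)   # e.g. "1.0-1"
    assert _parse_letter_version(None, None) is None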
+
+
+_local_version_seperators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local):
+    """
+    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+    """
+    if local is not None:
+        return tuple(
+            part.lower() if not part.isdigit() else int(part)
+            for part in _local_version_seperators.split(local)
+        )
+
+
+def _cmpkey(epoch, release, pre, post, dev, local):
+    # When we compare a release version, we want to compare it with all of the
+    # trailing zeros removed. So we reverse the list, drop the now-leading
+    # zeros until we hit something non-zero, re-reverse the rest back into the
+    # correct order, and use that tuple as our sorting key.
+    release = tuple(
+        reversed(list(
+            itertools.dropwhile(
+                lambda x: x == 0,
+                reversed(release),
+            )
+        ))
+    )
+
+    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+    # We'll do this by abusing the pre segment, but we _only_ want to do this
+    # if there is not a pre or a post segment. If we have one of those then
+    # the normal sorting rules will handle this case correctly.
+    if pre is None and post is None and dev is not None:
+        pre = -Infinity
+    # Versions without a pre-release (except as noted above) should sort after
+    # those with one.
+    elif pre is None:
+        pre = Infinity
+
+    # Versions without a post segment should sort before those with one.
+    if post is None:
+        post = -Infinity
+
+    # Versions without a development segment should sort after those with one.
+    if dev is None:
+        dev = Infinity
+
+    if local is None:
+        # Versions without a local segment should sort before those with one.
+        local = -Infinity
+    else:
+        # Versions with a local segment need that segment parsed to implement
+        # the sorting rules in PEP440.
+        # - Alpha numeric segments sort before numeric segments
+        # - Alpha numeric segments sort lexicographically
+        # - Numeric segments sort numerically
+        # - Shorter versions sort before longer versions when the prefixes
+        #   match exactly
+        local = tuple(
+            (i, "") if isinstance(i, int) else (-Infinity, i)
+            for i in local
+        )
+
+    return epoch, release, pre, post, dev, local
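
Editor's note: the key built above is what drives PEP 440 ordering; a short hedged check of the resulting sort order (version strings chosen arbitrarily):

    from pkg_resources._vendor.packaging.version import Version

    ordered = sorted(["1.0.post1", "1.0", "1.0a1", "1.0.dev0", "1.0+local"],
                     key=Version)
    # dev releases sort first, then pre-releases, then the final release,
    # then local variants of it, then post releases.
    assert ordered == ["1.0.dev0", "1.0a1", "1.0", "1.0+local", "1.0.post1"]
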
diff --git a/awx/lib/site-packages/pkg_resources/_vendor/vendored.txt b/awx/lib/site-packages/pkg_resources/_vendor/vendored.txt
new file mode 100644
index 0000000000..75a31670c6
--- /dev/null
+++ b/awx/lib/site-packages/pkg_resources/_vendor/vendored.txt
@@ -0,0 +1 @@
+packaging==15.0
diff --git a/awx/lib/site-packages/pkg_resources/api_tests.txt b/awx/lib/site-packages/pkg_resources/api_tests.txt
new file mode 100644
index 0000000000..a6c25a378f
--- /dev/null
+++ b/awx/lib/site-packages/pkg_resources/api_tests.txt
@@ -0,0 +1,419 @@
+Pluggable Distributions of Python Software
+==========================================
+
+Distributions
+-------------
+
+A "Distribution" is a collection of files that represent a "Release" of a
+"Project" as of a particular point in time, denoted by a
+"Version"::
+
+    >>> import sys, pkg_resources
+    >>> from pkg_resources import Distribution
+    >>> Distribution(project_name="Foo", version="1.2")
+    Foo 1.2
+
+Distributions have a location, which can be a filename, URL, or really anything
+else you care to use::
+
+    >>> dist = Distribution(
+    ...     location="http://example.com/something",
+    ...     project_name="Bar", version="0.9"
+    ... )
+
+    >>> dist
+    Bar 0.9 (http://example.com/something)
+
+
+Distributions have various introspectable attributes::
+
+    >>> dist.location
+    'http://example.com/something'
+
+    >>> dist.project_name
+    'Bar'
+
+    >>> dist.version
+    '0.9'
+
+    >>> dist.py_version == sys.version[:3]
+    True
+
+    >>> print(dist.platform)
+    None
+
+Including various computed attributes::
+
+    >>> from pkg_resources import parse_version
+    >>> dist.parsed_version == parse_version(dist.version)
+    True
+
+    >>> dist.key    # case-insensitive form of the project name
+    'bar'
+
+Distributions are compared (and hashed) by version first::
+
+    >>> Distribution(version='1.0') == Distribution(version='1.0')
+    True
+    >>> Distribution(version='1.0') == Distribution(version='1.1')
+    False
+    >>> Distribution(version='1.0') <  Distribution(version='1.1')
+    True
+
+but also by project name (case-insensitive), platform, Python version,
+location, etc.::
+
+    >>> Distribution(project_name="Foo",version="1.0") == \
+    ... Distribution(project_name="Foo",version="1.0")
+    True
+
+    >>> Distribution(project_name="Foo",version="1.0") == \
+    ... Distribution(project_name="foo",version="1.0")
+    True
+
+    >>> Distribution(project_name="Foo",version="1.0") == \
+    ... Distribution(project_name="Foo",version="1.1")
+    False
+
+    >>> Distribution(project_name="Foo",py_version="2.3",version="1.0") == \
+    ... Distribution(project_name="Foo",py_version="2.4",version="1.0")
+    False
+
+    >>> Distribution(location="spam",version="1.0") == \
+    ... Distribution(location="spam",version="1.0")
+    True
+
+    >>> Distribution(location="spam",version="1.0") == \
+    ... Distribution(location="baz",version="1.0")
+    False
+
+
+
+Hash and compare distribution by prio/plat
+
+Get version from metadata
+provider capabilities
+egg_name()
+as_requirement()
+from_location, from_filename (w/path normalization)
+
+Releases may have zero or more "Requirements", which indicate
+what releases of another project the release requires in order to
+function.  A Requirement names the other project, expresses some criteria
+as to what releases of that project are acceptable, and lists any "Extras"
+that the requiring release may need from that project.  (An Extra is an
+optional feature of a Release, that can only be used if its additional
+Requirements are satisfied.)
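
Editor's note: a hedged illustration (editorial, not part of the vendored test file) of how a Requirement with extras and version criteria looks through this API, reusing the ``Bar 0.9`` distribution defined above::

    >>> req = pkg_resources.Requirement.parse("Bar[quux]>=0.9,<2.0")
    >>> req.project_name, req.key
    ('Bar', 'bar')
    >>> req.extras
    ('quux',)
    >>> str(req.specifier)
    '<2.0,>=0.9'
    >>> dist in req
    True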
+
+
+
+The Working Set
+---------------
+
+A collection of active distributions is called a Working Set.  Note that a
+Working Set can contain any importable distribution, not just pluggable ones.
+For example, the Python standard library is an importable distribution that
+will usually be part of the Working Set, even though it is not pluggable.
+Similarly, when you are doing development work on a project, the files you are
+editing are also a Distribution.  (And, with a little attention to the
+directory names used,  and including some additional metadata, such a
+"development distribution" can be made pluggable as well.)
+
+    >>> from pkg_resources import WorkingSet
+
+A working set's entries are the sys.path entries that correspond to the active
+distributions.  By default, the working set's entries are the items on
+``sys.path``::
+
+    >>> ws = WorkingSet()
+    >>> ws.entries == sys.path
+    True
+
+But you can also create an empty working set explicitly, and add distributions
+to it::
+
+    >>> ws = WorkingSet([])
+    >>> ws.add(dist)
+    >>> ws.entries
+    ['http://example.com/something']
+    >>> dist in ws
+    True
+    >>> Distribution('foo',version="") in ws
+    False
+
+And you can iterate over its distributions::
+
+    >>> list(ws)
+    [Bar 0.9 (http://example.com/something)]
+
+Adding the same distribution more than once is a no-op::
+
+    >>> ws.add(dist)
+    >>> list(ws)
+    [Bar 0.9 (http://example.com/something)]
+
+For that matter, adding multiple distributions for the same project also does
+nothing, because a working set can only hold one active distribution per
+project -- the first one added to it::
+
+    >>> ws.add(
+    ...     Distribution(
+    ...         'http://example.com/something', project_name="Bar",
+    ...         version="7.2"
+    ...     )
+    ... )
+    >>> list(ws)
+    [Bar 0.9 (http://example.com/something)]
+
+You can append a path entry to a working set using ``add_entry()``::
+
+    >>> ws.entries
+    ['http://example.com/something']
+    >>> ws.add_entry(pkg_resources.__file__)
+    >>> ws.entries
+    ['http://example.com/something', '...pkg_resources...']
+
+Multiple additions result in multiple entries, even if the entry is already in
+the working set (because ``sys.path`` can contain the same entry more than
+once)::
+
+    >>> ws.add_entry(pkg_resources.__file__)
+    >>> ws.entries
+    ['...example.com...', '...pkg_resources...', '...pkg_resources...']
+
+And you can specify the path entry a distribution was found under, using the
+optional second parameter to ``add()``::
+
+    >>> ws = WorkingSet([])
+    >>> ws.add(dist,"foo")
+    >>> ws.entries
+    ['foo']
+
+But even if a distribution is found under multiple path entries, it still only
+shows up once when iterating the working set::
+
+    >>> ws.add_entry(ws.entries[0])
+    >>> list(ws)
+    [Bar 0.9 (http://example.com/something)]
+
+You can ask a WorkingSet to ``find()`` a distribution matching a requirement::
+
+    >>> from pkg_resources import Requirement
+    >>> print(ws.find(Requirement.parse("Foo==1.0")))   # no match, return None
+    None
+
+    >>> ws.find(Requirement.parse("Bar==0.9"))  # match, return distribution
+    Bar 0.9 (http://example.com/something)
+
+Note that asking for a conflicting version of a distribution already in a
+working set triggers a ``pkg_resources.VersionConflict`` error::
+
+    >>> try:
+    ...     ws.find(Requirement.parse("Bar==1.0"))
+    ... except pkg_resources.VersionConflict as exc:
+    ...     print(str(exc))
+    ... else:
+    ...     raise AssertionError("VersionConflict was not raised")
+    (Bar 0.9 (http://example.com/something), Requirement.parse('Bar==1.0'))
+
+You can subscribe a callback function to receive notifications whenever a new
+distribution is added to a working set.  The callback is immediately invoked
+once for each existing distribution in the working set, and then is called
+again for new distributions added thereafter::
+
+    >>> def added(dist): print("Added %s" % dist)
+    >>> ws.subscribe(added)
+    Added Bar 0.9
+    >>> foo12 = Distribution(project_name="Foo", version="1.2", location="f12")
+    >>> ws.add(foo12)
+    Added Foo 1.2
+
+Note, however, that only the first distribution added for a given project name
+will trigger a callback, even during the initial ``subscribe()`` callback::
+
+    >>> foo14 = Distribution(project_name="Foo", version="1.4", location="f14")
+    >>> ws.add(foo14)   # no callback, because Foo 1.2 is already active
+
+    >>> ws = WorkingSet([])
+    >>> ws.add(foo12)
+    >>> ws.add(foo14)
+    >>> ws.subscribe(added)
+    Added Foo 1.2
+
+And adding a callback more than once has no effect, either::
+
+    >>> ws.subscribe(added)     # no callbacks
+
+    # and no double-callbacks on subsequent additions, either
+    >>> just_a_test = Distribution(project_name="JustATest", version="0.99")
+    >>> ws.add(just_a_test)
+    Added JustATest 0.99
+
+
+Finding Plugins
+---------------
+
+``WorkingSet`` objects can be used to figure out what plugins in an
+``Environment`` can be loaded without any resolution errors::
+
+    >>> from pkg_resources import Environment
+
+    >>> plugins = Environment([])   # normally, a list of plugin directories
+    >>> plugins.add(foo12)
+    >>> plugins.add(foo14)
+    >>> plugins.add(just_a_test)
+
+In the simplest case, we just get the newest version of each distribution in
+the plugin environment::
+
+    >>> ws = WorkingSet([])
+    >>> ws.find_plugins(plugins)
+    ([JustATest 0.99, Foo 1.4 (f14)], {})
+
+But if there's a problem with a version conflict or missing requirements, the
+method falls back to older versions, and the error info dict will contain an
+exception instance for each unloadable plugin::
+
+    >>> ws.add(foo12)   # this will conflict with Foo 1.4
+    >>> ws.find_plugins(plugins)
+    ([JustATest 0.99, Foo 1.2 (f12)], {Foo 1.4 (f14): VersionConflict(...)})
+
+But if you disallow fallbacks, the failed plugin will be skipped instead of
+trying older versions::
+
+    >>> ws.find_plugins(plugins, fallback=False)
+    ([JustATest 0.99], {Foo 1.4 (f14): VersionConflict(...)})
+
+
+
+Platform Compatibility Rules
+----------------------------
+
+On the Mac, there are potential compatibility issues for modules compiled
+on newer versions of Mac OS X than what the user is running. Additionally,
+Mac OS X has two processor architectures to contend with: Intel and PowerPC.
+
+Basic equality works as on other platforms::
+
+    >>> from pkg_resources import compatible_platforms as cp
+    >>> reqd = 'macosx-10.4-ppc'
+    >>> cp(reqd, reqd)
+    True
+    >>> cp("win32", reqd)
+    False
+
+Distributions made on other machine types are not compatible::
+
+    >>> cp("macosx-10.4-i386", reqd)
+    False
+
+Distributions made on earlier versions of the OS are compatible, as
+long as they are from the same top-level version. The patchlevel version
+number does not matter::
+
+    >>> cp("macosx-10.4-ppc", reqd)
+    True
+    >>> cp("macosx-10.3-ppc", reqd)
+    True
+    >>> cp("macosx-10.5-ppc", reqd)
+    False
+    >>> cp("macosx-9.5-ppc", reqd)
+    False
+
+Backwards compatibility for packages made via earlier versions of
+setuptools is provided as well::
+
+    >>> cp("darwin-8.2.0-Power_Macintosh", reqd)
+    True
+    >>> cp("darwin-7.2.0-Power_Macintosh", reqd)
+    True
+    >>> cp("darwin-8.2.0-Power_Macintosh", "macosx-10.3-ppc")
+    False
+
+
+Environment Markers
+-------------------
+
+    >>> from pkg_resources import invalid_marker as im, evaluate_marker as em
+    >>> import os
+
+    >>> print(im("sys_platform"))
+    Comparison or logical expression expected
+
+    >>> print(im("sys_platform=="))
+    invalid syntax
+
+    >>> print(im("sys_platform=='win32'"))
+    False
+
+    >>> print(im("sys=='x'"))
+    Unknown name 'sys'
+
+    >>> print(im("(extra)"))
+    Comparison or logical expression expected
+
+    >>> print(im("(extra"))
+    invalid syntax
+
+    >>> print(im("os.open('foo')=='y'"))
+    Language feature not supported in environment markers
+
+    >>> print(im("'x'=='y' and os.open('foo')=='y'"))   # no short-circuit!
+    Language feature not supported in environment markers
+
+    >>> print(im("'x'=='x' or os.open('foo')=='y'"))   # no short-circuit!
+    Language feature not supported in environment markers
+
+    >>> print(im("'x' < 'y'"))
+    '<' operator not allowed in environment markers
+
+    >>> print(im("'x' < 'y' < 'z'"))
+    Chained comparison not allowed in environment markers
+
+    >>> print(im("r'x'=='x'"))
+    Only plain strings allowed in environment markers
+
+    >>> print(im("'''x'''=='x'"))
+    Only plain strings allowed in environment markers
+
+    >>> print(im('"""x"""=="x"'))
+    Only plain strings allowed in environment markers
+
+    >>> print(im(r"'x\n'=='x'"))
+    Only plain strings allowed in environment markers
+
+    >>> print(im("os.open=='y'"))
+    Language feature not supported in environment markers
+
+    >>> em('"x"=="x"')
+    True
+
+    >>> em('"x"=="y"')
+    False
+
+    >>> em('"x"=="y" and "x"=="x"')
+    False
+
+    >>> em('"x"=="y" or "x"=="x"')
+    True
+
+    >>> em('"x"=="y" and "x"=="q" or "z"=="z"')
+    True
+
+    >>> em('"x"=="y" and ("x"=="q" or "z"=="z")')
+    False
+
+    >>> em('"x"=="y" and "z"=="z" or "x"=="q"')
+    False
+
+    >>> em('"x"=="x" and "z"=="z" or "x"=="q"')
+    True
+
+    >>> em("sys_platform=='win32'") == (sys.platform=='win32')
+    True
+
+    >>> em("'x' in 'yx'")
+    True
+
+    >>> em("'yx' in 'x'")
+    False
diff --git a/awx/lib/site-packages/pkg_resources/tests/__init__.py b/awx/lib/site-packages/pkg_resources/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/awx/lib/site-packages/pkg_resources/tests/test_pkg_resources.py b/awx/lib/site-packages/pkg_resources/tests/test_pkg_resources.py
new file mode 100644
index 0000000000..564d7cec4f
--- /dev/null
+++ b/awx/lib/site-packages/pkg_resources/tests/test_pkg_resources.py
@@ -0,0 +1,111 @@
+import sys
+import tempfile
+import os
+import zipfile
+import datetime
+import time
+import subprocess
+
+import pkg_resources
+
+try:
+    unicode
+except NameError:
+    unicode = str
+
+def timestamp(dt):
+    """
+    Return a timestamp for a local, naive datetime instance.
+    """
+    try:
+        return dt.timestamp()
+    except AttributeError:
+        # Python 3.2 and earlier
+        return time.mktime(dt.timetuple())
+
+class EggRemover(unicode):
+    def __call__(self):
+        if self in sys.path:
+            sys.path.remove(self)
+        if os.path.exists(self):
+            os.remove(self)
+
+class TestZipProvider(object):
+    finalizers = []
+
+    ref_time = datetime.datetime(2013, 5, 12, 13, 25, 0)
+    "A reference time for a file modification"
+
+    @classmethod
+    def setup_class(cls):
+        "create a zip egg and add it to sys.path"
+        egg = tempfile.NamedTemporaryFile(suffix='.egg', delete=False)
+        zip_egg = zipfile.ZipFile(egg, 'w')
+        zip_info = zipfile.ZipInfo()
+        zip_info.filename = 'mod.py'
+        zip_info.date_time = cls.ref_time.timetuple()
+        zip_egg.writestr(zip_info, 'x = 3\n')
+        zip_info = zipfile.ZipInfo()
+        zip_info.filename = 'data.dat'
+        zip_info.date_time = cls.ref_time.timetuple()
+        zip_egg.writestr(zip_info, 'hello, world!')
+        zip_egg.close()
+        egg.close()
+
+        sys.path.append(egg.name)
+        cls.finalizers.append(EggRemover(egg.name))
+
+    @classmethod
+    def teardown_class(cls):
+        for finalizer in cls.finalizers:
+            finalizer()
+
+    def test_resource_filename_rewrites_on_change(self):
+        """
+        If a previous call to get_resource_filename has saved the file, but
+        the file has since been replaced with different content of the same
+        size and modification time, a subsequent call to
+        get_resource_filename should re-extract (overwrite) it from the zip.
+        """
+        import mod
+        manager = pkg_resources.ResourceManager()
+        zp = pkg_resources.ZipProvider(mod)
+        filename = zp.get_resource_filename(manager, 'data.dat')
+        actual = datetime.datetime.fromtimestamp(os.stat(filename).st_mtime)
+        assert actual == self.ref_time
+        f = open(filename, 'w')
+        f.write('hello, world?')
+        f.close()
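+        # Restore the reference mtime so size and modification time still
+        # match the zip entry; only the content comparison can now detect
+        # that the extracted copy was changed.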
+        ts = timestamp(self.ref_time)
+        os.utime(filename, (ts, ts))
+        filename = zp.get_resource_filename(manager, 'data.dat')
+        f = open(filename)
+        assert f.read() == 'hello, world!'
+        manager.cleanup_resources()
+
+class TestResourceManager(object):
+    def test_get_cache_path(self):
+        mgr = pkg_resources.ResourceManager()
+        path = mgr.get_cache_path('foo')
+        type_ = str(type(path))
+        message = "Unexpected type from get_cache_path: " + type_
+        assert isinstance(path, (unicode, str)), message
+
+
+class TestIndependence:
+    """
+    Tests to ensure that pkg_resources runs independently from setuptools.
+    """
+    def test_setuptools_not_imported(self):
+        """
+        In a separate Python environment, import pkg_resources and assert
+        that action doesn't cause setuptools to be imported.
+        """
+        lines = (
+            'import pkg_resources',
+            'import sys',
+            'assert "setuptools" not in sys.modules, '
+                '"setuptools was imported"',
+        )
+        cmd = [sys.executable, '-c', '; '.join(lines)]
+        subprocess.check_call(cmd)
diff --git a/awx/lib/site-packages/pkg_resources/tests/test_resources.py b/awx/lib/site-packages/pkg_resources/tests/test_resources.py
new file mode 100644
index 0000000000..a55478a249
--- /dev/null
+++ b/awx/lib/site-packages/pkg_resources/tests/test_resources.py
@@ -0,0 +1,661 @@
+import os
+import sys
+import tempfile
+import shutil
+import string
+
+import pytest
+
+import pkg_resources
+from pkg_resources import (parse_requirements, VersionConflict, parse_version,
+    Distribution, EntryPoint, Requirement, safe_version, safe_name,
+    WorkingSet)
+
+packaging = pkg_resources.packaging
+
+
+def safe_repr(obj, short=False):
+    """ copied from Python2.7"""
+    try:
+        result = repr(obj)
+    except Exception:
+        result = object.__repr__(obj)
+    if not short or len(result) < pkg_resources._MAX_LENGTH:
+        return result
+    return result[:pkg_resources._MAX_LENGTH] + ' [truncated]...'
+
+
+class Metadata(pkg_resources.EmptyProvider):
+    """Mock object to return metadata as if from an on-disk distribution"""
+
+    def __init__(self, *pairs):
+        self.metadata = dict(pairs)
+
+    def has_metadata(self, name):
+        return name in self.metadata
+
+    def get_metadata(self, name):
+        return self.metadata[name]
+
+    def get_metadata_lines(self, name):
+        return pkg_resources.yield_lines(self.get_metadata(name))
+
+
+dist_from_fn = pkg_resources.Distribution.from_filename
+
+class TestDistro:
+
+    def testCollection(self):
+        # empty path should produce no distributions
+        ad = pkg_resources.Environment([], platform=None, python=None)
+        assert list(ad) == []
+        assert ad['FooPkg'] == []
+        ad.add(dist_from_fn("FooPkg-1.3_1.egg"))
+        ad.add(dist_from_fn("FooPkg-1.4-py2.4-win32.egg"))
+        ad.add(dist_from_fn("FooPkg-1.2-py2.4.egg"))
+
+        # Name is in there now
+        assert ad['FooPkg']
+        # But only 1 package
+        assert list(ad) == ['foopkg']
+
+        # Distributions sort by version
+        assert [dist.version for dist in ad['FooPkg']] == ['1.4','1.3-1','1.2']
+
+        # Removing a distribution leaves sequence alone
+        ad.remove(ad['FooPkg'][1])
+        assert [dist.version for dist in ad['FooPkg']] == ['1.4','1.2']
+
+        # And inserting adds them in order
+        ad.add(dist_from_fn("FooPkg-1.9.egg"))
+        assert [dist.version for dist in ad['FooPkg']] == ['1.9','1.4','1.2']
+
+        ws = WorkingSet([])
+        foo12 = dist_from_fn("FooPkg-1.2-py2.4.egg")
+        foo14 = dist_from_fn("FooPkg-1.4-py2.4-win32.egg")
+        req, = parse_requirements("FooPkg>=1.3")
+
+        # Nominal case: no distros on path, should yield all applicable
+        assert ad.best_match(req, ws).version == '1.9'
+        # If a matching distro is already installed, should return only that
+        ws.add(foo14)
+        assert ad.best_match(req, ws).version == '1.4'
+
+        # If the first matching distro is unsuitable, it's a version conflict
+        ws = WorkingSet([])
+        ws.add(foo12)
+        ws.add(foo14)
+        with pytest.raises(VersionConflict):
+            ad.best_match(req, ws)
+
+        # If more than one match on the path, the first one takes precedence
+        ws = WorkingSet([])
+        ws.add(foo14)
+        ws.add(foo12)
+        ws.add(foo14)
+        assert ad.best_match(req, ws).version == '1.4'
+
+    def checkFooPkg(self,d):
+        assert d.project_name == "FooPkg"
+        assert d.key == "foopkg"
+        assert d.version == "1.3.post1"
+        assert d.py_version == "2.4"
+        assert d.platform == "win32"
+        assert d.parsed_version == parse_version("1.3-1")
+
+    def testDistroBasics(self):
+        d = Distribution(
+            "/some/path",
+            project_name="FooPkg",version="1.3-1",py_version="2.4",platform="win32"
+        )
+        self.checkFooPkg(d)
+
+        d = Distribution("/some/path")
+        assert d.py_version == sys.version[:3]
+        assert d.platform == None
+
+    def testDistroParse(self):
+        d = dist_from_fn("FooPkg-1.3.post1-py2.4-win32.egg")
+        self.checkFooPkg(d)
+        d = dist_from_fn("FooPkg-1.3.post1-py2.4-win32.egg-info")
+        self.checkFooPkg(d)
+
+    def testDistroMetadata(self):
+        d = Distribution(
+            "/some/path", project_name="FooPkg", py_version="2.4", platform="win32",
+            metadata = Metadata(
+                ('PKG-INFO',"Metadata-Version: 1.0\nVersion: 1.3-1\n")
+            )
+        )
+        self.checkFooPkg(d)
+
+    def distRequires(self, txt):
+        return Distribution("/foo", metadata=Metadata(('depends.txt', txt)))
+
+    def checkRequires(self, dist, txt, extras=()):
+        assert list(dist.requires(extras)) == list(parse_requirements(txt))
+
+    def testDistroDependsSimple(self):
+        for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0":
+            self.checkRequires(self.distRequires(v), v)
+
+    def testResolve(self):
+        ad = pkg_resources.Environment([])
+        ws = WorkingSet([])
+        # Resolving no requirements -> nothing to install
+        assert list(ws.resolve([], ad)) == []
+        # Request something not in the collection -> DistributionNotFound
+        with pytest.raises(pkg_resources.DistributionNotFound):
+            ws.resolve(parse_requirements("Foo"), ad)
+
+        Foo = Distribution.from_filename(
+            "/foo_dir/Foo-1.2.egg",
+            metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0"))
+        )
+        ad.add(Foo)
+        ad.add(Distribution.from_filename("Foo-0.9.egg"))
+
+        # Request thing(s) that are available -> list to activate
+        for i in range(3):
+            targets = list(ws.resolve(parse_requirements("Foo"), ad))
+            assert targets == [Foo]
+            list(map(ws.add,targets))
+        with pytest.raises(VersionConflict):
+            ws.resolve(parse_requirements("Foo==0.9"), ad)
+        ws = WorkingSet([]) # reset
+
+        # Request an extra that causes an unresolved dependency for "Baz"
+        with pytest.raises(pkg_resources.DistributionNotFound):
+            ws.resolve(parse_requirements("Foo[bar]"), ad)
+        Baz = Distribution.from_filename(
+            "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo"))
+        )
+        ad.add(Baz)
+
+        # Activation list now includes resolved dependency
+        assert list(ws.resolve(parse_requirements("Foo[bar]"), ad)) ==[Foo,Baz]
+        # Requests for conflicting versions produce VersionConflict
+        with pytest.raises(VersionConflict) as vc:
+            ws.resolve(parse_requirements("Foo==1.2\nFoo!=1.2"), ad)
+
+        msg = 'Foo 0.9 is installed but Foo==1.2 is required'
+        assert vc.value.report() == msg
+
+    def testDistroDependsOptions(self):
+        d = self.distRequires("""
+            Twisted>=1.5
+            [docgen]
+            ZConfig>=2.0
+            docutils>=0.3
+            [fastcgi]
+            fcgiapp>=0.1""")
+        self.checkRequires(d,"Twisted>=1.5")
+        self.checkRequires(
+            d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"]
+        )
+        self.checkRequires(
+            d,"Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"]
+        )
+        self.checkRequires(
+            d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(),
+            ["docgen","fastcgi"]
+        )
+        self.checkRequires(
+            d,"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(),
+            ["fastcgi", "docgen"]
+        )
+        with pytest.raises(pkg_resources.UnknownExtra):
+            d.requires(["foo"])
+
+
+class TestWorkingSet:
+    def test_find_conflicting(self):
+        ws = WorkingSet([])
+        Foo = Distribution.from_filename("/foo_dir/Foo-1.2.egg")
+        ws.add(Foo)
+
+        # create a requirement that conflicts with Foo 1.2
+        req = next(parse_requirements("Foo<1.2"))
+
+        with pytest.raises(VersionConflict) as vc:
+            ws.find(req)
+
+        msg = 'Foo 1.2 is installed but Foo<1.2 is required'
+        assert vc.value.report() == msg
+
+    def test_resolve_conflicts_with_prior(self):
+        """
+        A ContextualVersionConflict should be raised when a requirement
+        conflicts with a prior requirement for a different package.
+        """
+        # Create installation where Foo depends on Baz 1.0 and Bar depends on
+        # Baz 2.0.
+        ws = WorkingSet([])
+        md = Metadata(('depends.txt', "Baz==1.0"))
+        Foo = Distribution.from_filename("/foo_dir/Foo-1.0.egg", metadata=md)
+        ws.add(Foo)
+        md = Metadata(('depends.txt', "Baz==2.0"))
+        Bar = Distribution.from_filename("/foo_dir/Bar-1.0.egg", metadata=md)
+        ws.add(Bar)
+        Baz = Distribution.from_filename("/foo_dir/Baz-1.0.egg")
+        ws.add(Baz)
+        Baz = Distribution.from_filename("/foo_dir/Baz-2.0.egg")
+        ws.add(Baz)
+
+        with pytest.raises(VersionConflict) as vc:
+            ws.resolve(parse_requirements("Foo\nBar\n"))
+
+        msg = "Baz 1.0 is installed but Baz==2.0 is required by {'Bar'}"
+        if pkg_resources.PY2:
+            msg = msg.replace("{'Bar'}", "set(['Bar'])")
+        assert vc.value.report() == msg
+
+
+class TestEntryPoints:
+
+    def assertfields(self, ep):
+        assert ep.name == "foo"
+        assert ep.module_name == "pkg_resources.tests.test_resources"
+        assert ep.attrs == ("TestEntryPoints",)
+        assert ep.extras == ("x",)
+        assert ep.load() is TestEntryPoints
+        expect = "foo = pkg_resources.tests.test_resources:TestEntryPoints [x]"
+        assert str(ep) == expect
+
+    def setup_method(self, method):
+        self.dist = Distribution.from_filename(
+            "FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt','[x]')))
+
+    def testBasics(self):
+        ep = EntryPoint(
+            "foo", "pkg_resources.tests.test_resources", ["TestEntryPoints"],
+            ["x"], self.dist
+        )
+        self.assertfields(ep)
+
+    def testParse(self):
+        s = "foo = pkg_resources.tests.test_resources:TestEntryPoints [x]"
+        ep = EntryPoint.parse(s, self.dist)
+        self.assertfields(ep)
+
+        ep = EntryPoint.parse("bar baz=  spammity[PING]")
+        assert ep.name == "bar baz"
+        assert ep.module_name == "spammity"
+        assert ep.attrs == ()
+        assert ep.extras == ("ping",)
+
+        ep = EntryPoint.parse(" fizzly =  wocka:foo")
+        assert ep.name == "fizzly"
+        assert ep.module_name == "wocka"
+        assert ep.attrs == ("foo",)
+        assert ep.extras == ()
+
+        # plus in the name
+        spec = "html+mako = mako.ext.pygmentplugin:MakoHtmlLexer"
+        ep = EntryPoint.parse(spec)
+        assert ep.name == 'html+mako'
+
+    reject_specs = "foo", "x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2"
+    @pytest.mark.parametrize("reject_spec", reject_specs)
+    def test_reject_spec(self, reject_spec):
+        with pytest.raises(ValueError):
+            EntryPoint.parse(reject_spec)
+
+    def test_printable_name(self):
+        """
+        Allow any printable character in the name.
+        """
+        # Create a name with all printable characters; strip the whitespace.
+        name = string.printable.strip()
+        spec = "{name} = module:attr".format(**locals())
+        ep = EntryPoint.parse(spec)
+        assert ep.name == name
+
+    def checkSubMap(self, m):
+        assert len(m) == len(self.submap_expect)
+        for key, ep in pkg_resources.iteritems(self.submap_expect):
+            assert repr(m.get(key)) == repr(ep)
+
+    submap_expect = dict(
+        feature1=EntryPoint('feature1', 'somemodule', ['somefunction']),
+        feature2=EntryPoint('feature2', 'another.module', ['SomeClass'], ['extra1','extra2']),
+        feature3=EntryPoint('feature3', 'this.module', extras=['something'])
+    )
+    submap_str = """
+            # define features for blah blah
+            feature1 = somemodule:somefunction
+            feature2 = another.module:SomeClass [extra1,extra2]
+            feature3 = this.module [something]
+    """
+
+    def testParseList(self):
+        self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str))
+        with pytest.raises(ValueError):
+            EntryPoint.parse_group("x a", "foo=bar")
+        with pytest.raises(ValueError):
+            EntryPoint.parse_group("x", ["foo=baz", "foo=bar"])
+
+    def testParseMap(self):
+        m = EntryPoint.parse_map({'xyz':self.submap_str})
+        self.checkSubMap(m['xyz'])
+        assert list(m.keys()) == ['xyz']
+        m = EntryPoint.parse_map("[xyz]\n"+self.submap_str)
+        self.checkSubMap(m['xyz'])
+        assert list(m.keys()) == ['xyz']
+        with pytest.raises(ValueError):
+            EntryPoint.parse_map(["[xyz]", "[xyz]"])
+        with pytest.raises(ValueError):
+            EntryPoint.parse_map(self.submap_str)
+
+class TestRequirements:
+
+    def testBasics(self):
+        r = Requirement.parse("Twisted>=1.2")
+        assert str(r) == "Twisted>=1.2"
+        assert repr(r) == "Requirement.parse('Twisted>=1.2')"
+        assert r == Requirement("Twisted", [('>=','1.2')], ())
+        assert r == Requirement("twisTed", [('>=','1.2')], ())
+        assert r != Requirement("Twisted", [('>=','2.0')], ())
+        assert r != Requirement("Zope", [('>=','1.2')], ())
+        assert r != Requirement("Zope", [('>=','3.0')], ())
+        assert r != Requirement.parse("Twisted[extras]>=1.2")
+
+    def testOrdering(self):
+        r1 = Requirement("Twisted", [('==','1.2c1'),('>=','1.2')], ())
+        r2 = Requirement("Twisted", [('>=','1.2'),('==','1.2c1')], ())
+        assert r1 == r2
+        assert str(r1) == str(r2)
+        assert str(r2) == "Twisted==1.2c1,>=1.2"
+
+    def testBasicContains(self):
+        r = Requirement("Twisted", [('>=','1.2')], ())
+        foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg")
+        twist11 = Distribution.from_filename("Twisted-1.1.egg")
+        twist12 = Distribution.from_filename("Twisted-1.2.egg")
+        assert parse_version('1.2') in r
+        assert parse_version('1.1') not in r
+        assert '1.2' in r
+        assert '1.1' not in r
+        assert foo_dist not in r
+        assert twist11 not in r
+        assert twist12 in r
+
+    def testOptionsAndHashing(self):
+        r1 = Requirement.parse("Twisted[foo,bar]>=1.2")
+        r2 = Requirement.parse("Twisted[bar,FOO]>=1.2")
+        assert r1 == r2
+        assert r1.extras == ("foo","bar")
+        assert r2.extras == ("bar","foo")  # extras are normalized
+        assert hash(r1) == hash(r2)
+        assert (
+            hash(r1)
+            ==
+            hash((
+                "twisted",
+                packaging.specifiers.SpecifierSet(">=1.2"),
+                frozenset(["foo","bar"]),
+            ))
+        )
+
+    def testVersionEquality(self):
+        r1 = Requirement.parse("foo==0.3a2")
+        r2 = Requirement.parse("foo!=0.3a4")
+        d = Distribution.from_filename
+
+        assert d("foo-0.3a4.egg") not in r1
+        assert d("foo-0.3a1.egg") not in r1
+        assert d("foo-0.3a4.egg") not in r2
+
+        assert d("foo-0.3a2.egg") in r1
+        assert d("foo-0.3a2.egg") in r2
+        assert d("foo-0.3a3.egg") in r2
+        assert d("foo-0.3a5.egg") in r2
+
+    def testSetuptoolsProjectName(self):
+        """
+        The setuptools project should implement the setuptools package.
+        """
+
+        assert (
+            Requirement.parse('setuptools').project_name == 'setuptools')
+        # setuptools 0.7 and higher means setuptools.
+        assert (
+            Requirement.parse('setuptools == 0.7').project_name == 'setuptools')
+        assert (
+            Requirement.parse('setuptools == 0.7a1').project_name == 'setuptools')
+        assert (
+            Requirement.parse('setuptools >= 0.7').project_name == 'setuptools')
+
+
+class TestParsing:
+
+    def testEmptyParse(self):
+        assert list(parse_requirements('')) == []
+
+    def testYielding(self):
+        for inp,out in [
+            ([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']),
+            (['x\n\n','y'], ['x','y']),
+        ]:
+            assert list(pkg_resources.yield_lines(inp)) == out
+
+    def testSplitting(self):
+        sample = """
+                    x
+                    [Y]
+                    z
+
+                    a
+                    [b ]
+                    # foo
+                    c
+                    [ d]
+                    [q]
+                    v
+                    """
+        assert (
+            list(pkg_resources.split_sections(sample))
+                ==
+            [
+                (None, ["x"]),
+                ("Y", ["z", "a"]),
+                ("b", ["c"]),
+                ("d", []),
+                ("q", ["v"]),
+            ]
+        )
+        with pytest.raises(ValueError):
+            list(pkg_resources.split_sections("[foo"))
+
+    def testSafeName(self):
+        assert safe_name("adns-python") == "adns-python"
+        assert safe_name("WSGI Utils") == "WSGI-Utils"
+        assert safe_name("WSGI  Utils") == "WSGI-Utils"
+        assert safe_name("Money$$$Maker") == "Money-Maker"
+        assert safe_name("peak.web") != "peak-web"
+
+    def testSafeVersion(self):
+        assert safe_version("1.2-1") == "1.2.post1"
+        assert safe_version("1.2 alpha") == "1.2.alpha"
+        assert safe_version("2.3.4 20050521") == "2.3.4.20050521"
+        assert safe_version("Money$$$Maker") == "Money-Maker"
+        assert safe_version("peak.web") == "peak.web"
+
+    def testSimpleRequirements(self):
+        assert (
+            list(parse_requirements('Twis-Ted>=1.2-1'))
+            ==
+            [Requirement('Twis-Ted',[('>=','1.2-1')], ())]
+        )
+        assert (
+            list(parse_requirements('Twisted >=1.2, \ # more\n<2.0'))
+            ==
+            [Requirement('Twisted',[('>=','1.2'),('<','2.0')], ())]
+        )
+        assert (
+            Requirement.parse("FooBar==1.99a3")
+            ==
+            Requirement("FooBar", [('==','1.99a3')], ())
+        )
+        with pytest.raises(ValueError):
+            Requirement.parse(">=2.3")
+        with pytest.raises(ValueError):
+            Requirement.parse("x\\")
+        with pytest.raises(ValueError):
+            Requirement.parse("x==2 q")
+        with pytest.raises(ValueError):
+            Requirement.parse("X==1\nY==2")
+        with pytest.raises(ValueError):
+            Requirement.parse("#")
+
+    def testVersionEquality(self):
+        def c(s1,s2):
+            p1, p2 = parse_version(s1),parse_version(s2)
+            assert p1 == p2, (s1,s2,p1,p2)
+
+        c('1.2-rc1', '1.2rc1')
+        c('0.4', '0.4.0')
+        c('0.4.0.0', '0.4.0')
+        c('0.4.0-0', '0.4-0')
+        c('0post1', '0.0post1')
+        c('0pre1', '0.0c1')
+        c('0.0.0preview1', '0c1')
+        c('0.0c1', '0-rc1')
+        c('1.2a1', '1.2.a.1')
+        c('1.2.a', '1.2a')
+
+    def testVersionOrdering(self):
+        def c(s1,s2):
+            p1, p2 = parse_version(s1),parse_version(s2)
+            assert p1 < p2, (s1, s2, p1, p2)
+
+        assert parse_version("3.0") > tuple(parse_version("2.0"))
+        assert parse_version("3.0") >= tuple(parse_version("2.0"))
+        assert parse_version("3.0") != tuple(parse_version("2.0"))
+        assert not (parse_version("3.0") != tuple(parse_version("3.0")))
+
+    def testVersionHashable(self):
+        """
+        Ensure that our versions stay hashable even though we've subclassed
+        them and added some shim code to them.
+        """
+        assert (
+            hash(parse_version("1.0"))
+            ==
+            hash(parse_version("1.0"))
+        )
+
+
+class TestNamespaces:
+
+    def setup_method(self, method):
+        self._ns_pkgs = pkg_resources._namespace_packages.copy()
+        self._tmpdir = tempfile.mkdtemp(prefix="tests-setuptools-")
+        os.makedirs(os.path.join(self._tmpdir, "site-pkgs"))
+        self._prev_sys_path = sys.path[:]
+        sys.path.append(os.path.join(self._tmpdir, "site-pkgs"))
+
+    def teardown_method(self, method):
+        shutil.rmtree(self._tmpdir)
+        pkg_resources._namespace_packages = self._ns_pkgs.copy()
+        sys.path = self._prev_sys_path[:]
+
+    @pytest.mark.skipif(os.path.islink(tempfile.gettempdir()),
+        reason="Test fails when /tmp is a symlink. See #231")
+    def test_two_levels_deep(self):
+        """
+        Test nested namespace packages.
+        Create namespace packages in the following tree:
+            site-pkgs/pkg1/pkg2
+            site-pkgs2/pkg1/pkg2
+        Check both are in the _namespace_packages dict and that their __path__
+        is correct.
+        """
+        sys.path.append(os.path.join(self._tmpdir, "site-pkgs2"))
+        os.makedirs(os.path.join(self._tmpdir, "site-pkgs", "pkg1", "pkg2"))
+        os.makedirs(os.path.join(self._tmpdir, "site-pkgs2", "pkg1", "pkg2"))
+        ns_str = "__import__('pkg_resources').declare_namespace(__name__)\n"
+        for site in ["site-pkgs", "site-pkgs2"]:
+            pkg1_init = open(os.path.join(self._tmpdir, site,
+                             "pkg1", "__init__.py"), "w")
+            pkg1_init.write(ns_str)
+            pkg1_init.close()
+            pkg2_init = open(os.path.join(self._tmpdir, site,
+                             "pkg1", "pkg2", "__init__.py"), "w")
+            pkg2_init.write(ns_str)
+            pkg2_init.close()
+        import pkg1
+        assert "pkg1" in pkg_resources._namespace_packages
+        # attempt to import pkg2 from site-pkgs2
+        import pkg1.pkg2
+        # check the _namespace_packages dict
+        assert "pkg1.pkg2" in pkg_resources._namespace_packages
+        assert pkg_resources._namespace_packages["pkg1"] == ["pkg1.pkg2"]
+        # check the __path__ attribute contains both paths
+        expected = [
+            os.path.join(self._tmpdir, "site-pkgs", "pkg1", "pkg2"),
+            os.path.join(self._tmpdir, "site-pkgs2", "pkg1", "pkg2"),
+        ]
+        assert pkg1.pkg2.__path__ == expected
diff --git a/awx/lib/site-packages/setuptools/__init__.py b/awx/lib/site-packages/setuptools/__init__.py
index fc9b7b936c..8188f12528 100644
--- a/awx/lib/site-packages/setuptools/__init__.py
+++ b/awx/lib/site-packages/setuptools/__init__.py
@@ -1,16 +1,17 @@
 """Extensions to the 'distutils' for large or complex distributions"""
 
 import os
-import sys
 import distutils.core
 import distutils.filelist
 from distutils.core import Command as _Command
 from distutils.util import convert_path
+from fnmatch import fnmatchcase
 
 import setuptools.version
 from setuptools.extension import Extension
 from setuptools.dist import Distribution, Feature, _get_unpatched
 from setuptools.depends import Require
+from setuptools.compat import filterfalse
 
 __all__ = [
     'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require',
@@ -27,33 +28,87 @@ run_2to3_on_doctests = True
 # Standard package names for fixer packages
 lib2to3_fixer_packages = ['lib2to3.fixes']
 
-def find_packages(where='.', exclude=()):
-    """Return a list all Python packages found within directory 'where'
 
-    'where' should be supplied as a "cross-platform" (i.e. URL-style) path; it
-    will be converted to the appropriate local path syntax.  'exclude' is a
-    sequence of package names to exclude; '*' can be used as a wildcard in the
-    names, such that 'foo.*' will exclude all subpackages of 'foo' (but not
-    'foo' itself).
-    """
-    out = []
-    stack=[(convert_path(where), '')]
-    while stack:
-        where,prefix = stack.pop(0)
-        for name in os.listdir(where):
-            fn = os.path.join(where,name)
-            looks_like_package = (
-                '.' not in name
-                and os.path.isdir(fn)
-                and os.path.isfile(os.path.join(fn, '__init__.py'))
-            )
-            if looks_like_package:
-                out.append(prefix+name)
-                stack.append((fn, prefix+name+'.'))
-    for pat in list(exclude)+['ez_setup']:
-        from fnmatch import fnmatchcase
-        out = [item for item in out if not fnmatchcase(item,pat)]
-    return out
+class PackageFinder(object):
+    @classmethod
+    def find(cls, where='.', exclude=(), include=('*',)):
+        """Return a list all Python packages found within directory 'where'
+
+        'where' should be supplied as a "cross-platform" (i.e. URL-style)
+        path; it will be converted to the appropriate local path syntax.
+        'exclude' is a sequence of package names to exclude; '*' can be used
+        as a wildcard in the names, such that 'foo.*' will exclude all
+        subpackages of 'foo' (but not 'foo' itself).
+
+        'include' is a sequence of package names to include.  If it's
+        specified, only the named packages will be included.  If it's not
+        specified, all found packages will be included.  'include' can contain
+        shell style wildcard patterns just like 'exclude'.
+
+        The list of included packages is built up first and then any
+        explicitly excluded packages are removed from it.
+        """
+        out = cls._find_packages_iter(convert_path(where))
+        out = cls.require_parents(out)
+        includes = cls._build_filter(*include)
+        excludes = cls._build_filter('ez_setup', '*__pycache__', *exclude)
+        out = filter(includes, out)
+        out = filterfalse(excludes, out)
+        return list(out)
+
+    @staticmethod
+    def require_parents(packages):
+        """
+        Exclude any apparent package that doesn't include its
+        parent.
+
+        For example, exclude 'foo.bar' if 'foo' is not present.
+        """
+        found = []
+        for pkg in packages:
+            base, sep, child = pkg.rpartition('.')
+            if base and base not in found:
+                continue
+            found.append(pkg)
+            yield pkg
+
+    @staticmethod
+    def _all_dirs(base_path):
+        """
+        Return all dirs in base_path, relative to base_path
+        """
+        for root, dirs, files in os.walk(base_path, followlinks=True):
+            for dir in dirs:
+                yield os.path.relpath(os.path.join(root, dir), base_path)
+
+    @classmethod
+    def _find_packages_iter(cls, base_path):
+        dirs = cls._all_dirs(base_path)
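+        # Directory names containing a dot can never be valid package path
+        # components, so drop them before the os.sep -> '.' conversion below.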
+        suitable = filterfalse(lambda n: '.' in n, dirs)
+        return (
+            path.replace(os.path.sep, '.')
+            for path in suitable
+            if cls._looks_like_package(os.path.join(base_path, path))
+        )
+
+    @staticmethod
+    def _looks_like_package(path):
+        return os.path.isfile(os.path.join(path, '__init__.py'))
+
+    @staticmethod
+    def _build_filter(*patterns):
+        """
+        Given a list of patterns, return a callable that will be true only if
+        the input matches one of the patterns.
+        """
+        return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns)
+
+class PEP420PackageFinder(PackageFinder):
+    @staticmethod
+    def _looks_like_package(path):
+        return True
+
+find_packages = PackageFinder.find
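+
+# Usage sketch (illustrative only; the directory and package names below are
+# hypothetical):
+#
+#   find_packages('src', include=['foo', 'foo.*'], exclude=['foo.tests'])
+#
+# collects every directory under 'src' that contains an __init__.py, keeps
+# only names matching the include patterns, then removes names matching the
+# exclude patterns (plus the built-in 'ez_setup' and '*__pycache__' filters).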
 
 setup = distutils.core.setup
 
@@ -83,7 +138,7 @@ def findall(dir = os.curdir):
     (relative to 'dir').
     """
     all_files = []
-    for base, dirs, files in os.walk(dir):
+    for base, dirs, files in os.walk(dir, followlinks=True):
         if base==os.curdir or base.startswith(os.curdir+os.sep):
             base = base[2:]
         if base:
@@ -92,7 +147,3 @@ def findall(dir = os.curdir):
     return all_files
 
 distutils.filelist.findall = findall    # fix findall bug in distutils.
-
-# sys.dont_write_bytecode was introduced in Python 2.6.
-_dont_write_bytecode = getattr(sys, 'dont_write_bytecode',
-    bool(os.environ.get("PYTHONDONTWRITEBYTECODE")))
diff --git a/awx/lib/site-packages/setuptools/archive_util.py b/awx/lib/site-packages/setuptools/archive_util.py
index 1109f34677..b3c9fa5690 100644
--- a/awx/lib/site-packages/setuptools/archive_util.py
+++ b/awx/lib/site-packages/setuptools/archive_util.py
@@ -6,42 +6,25 @@ __all__ = [
     "UnrecognizedFormat", "extraction_drivers", "unpack_directory",
 ]
 
-import zipfile, tarfile, os, shutil, posixpath
-from pkg_resources import ensure_directory
+import zipfile
+import tarfile
+import os
+import shutil
+import posixpath
+import contextlib
+from pkg_resources import ensure_directory, ContextualZipFile
 from distutils.errors import DistutilsError
 
 class UnrecognizedFormat(DistutilsError):
     """Couldn't recognize the archive type"""
 
 def default_filter(src,dst):
-    """The default progress/filter callback; returns True for all files"""   
+    """The default progress/filter callback; returns True for all files"""
     return dst
 
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
 def unpack_archive(filename, extract_dir, progress_filter=default_filter,
-    drivers=None
-):
+        drivers=None):
     """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``
 
     `progress_filter` is a function taking two arguments: a source path
@@ -75,52 +58,33 @@ def unpack_archive(filename, extract_dir, progress_filter=default_filter,
         )
 
 
-
-
-
-
-
 def unpack_directory(filename, extract_dir, progress_filter=default_filter):
     """"Unpack" a directory, using the same interface as for archives
 
     Raises ``UnrecognizedFormat`` if `filename` is not a directory
     """
     if not os.path.isdir(filename):
-        raise UnrecognizedFormat("%s is not a directory" % (filename,))
+        raise UnrecognizedFormat("%s is not a directory" % filename)
 
-    paths = {filename:('',extract_dir)}
+    paths = {
+        filename: ('', extract_dir),
+    }
     for base, dirs, files in os.walk(filename):
-        src,dst = paths[base]
+        src, dst = paths[base]
         for d in dirs:
-            paths[os.path.join(base,d)] = src+d+'/', os.path.join(dst,d)
+            paths[os.path.join(base, d)] = src + d + '/', os.path.join(dst, d)
         for f in files:
-            name = src+f
-            target = os.path.join(dst,f)
-            target = progress_filter(src+f, target)
+            target = os.path.join(dst, f)
+            target = progress_filter(src + f, target)
             if not target:
-                continue    # skip non-files
+                # skip non-files
+                continue
             ensure_directory(target)
-            f = os.path.join(base,f)
+            f = os.path.join(base, f)
             shutil.copyfile(f, target)
             shutil.copystat(f, target)
 
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
 def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
     """Unpack zip `filename` to `extract_dir`
 
@@ -132,8 +96,7 @@ def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
     if not zipfile.is_zipfile(filename):
         raise UnrecognizedFormat("%s is not a zip file" % (filename,))
 
-    z = zipfile.ZipFile(filename)
-    try:
+    with ContextualZipFile(filename) as z:
         for info in z.infolist():
             name = info.filename
 
@@ -152,17 +115,11 @@ def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
                 # file
                 ensure_directory(target)
                 data = z.read(info.filename)
-                f = open(target,'wb')
-                try:
+                with open(target, 'wb') as f:
                     f.write(data)
-                finally:
-                    f.close()
-                    del data
             unix_attributes = info.external_attr >> 16
             if unix_attributes:
                 os.chmod(target, unix_attributes)
-    finally:
-        z.close()
 
 
 def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
@@ -178,19 +135,22 @@ def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
         raise UnrecognizedFormat(
             "%s is not a compressed or uncompressed tar file" % (filename,)
         )
-    try:
-        tarobj.chown = lambda *args: None   # don't do any chowning!
+    with contextlib.closing(tarobj):
+        # don't do any chowning!
+        tarobj.chown = lambda *args: None
         for member in tarobj:
             name = member.name
             # don't extract absolute paths or ones with .. in them
             if not name.startswith('/') and '..' not in name.split('/'):
                 prelim_dst = os.path.join(extract_dir, *name.split('/'))
 
-                # resolve any links and to extract the link targets as normal files
+                # resolve any links and to extract the link targets as normal
+                # files
                 while member is not None and (member.islnk() or member.issym()):
                     linkpath = member.linkname
                     if member.issym():
-                        linkpath = posixpath.join(posixpath.dirname(member.name), linkpath)
+                        base = posixpath.dirname(member.name)
+                        linkpath = posixpath.join(base, linkpath)
                         linkpath = posixpath.normpath(linkpath)
                     member = tarobj._getmember(linkpath)
 
@@ -200,11 +160,11 @@ def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
                         if final_dst.endswith(os.sep):
                             final_dst = final_dst[:-1]
                         try:
-                            tarobj._extract_member(member, final_dst)  # XXX Ugh
+                            # XXX Ugh
+                            tarobj._extract_member(member, final_dst)
                         except tarfile.ExtractError:
-                            pass    # chown/chmod/mkfifo/mknode/makedev failed
+                            # chown/chmod/mkfifo/mknode/makedev failed
+                            pass
         return True
-    finally:
-        tarobj.close()
 
 extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
diff --git a/awx/lib/site-packages/setuptools/command/__init__.py b/awx/lib/site-packages/setuptools/command/__init__.py
index 29c9d75ad1..f6dbc39c40 100644
--- a/awx/lib/site-packages/setuptools/command/__init__.py
+++ b/awx/lib/site-packages/setuptools/command/__init__.py
@@ -5,10 +5,11 @@ __all__ = [
     'register', 'bdist_wininst', 'upload_docs',
 ]
 
-from setuptools.command import install_scripts
+from distutils.command.bdist import bdist
 import sys
 
-from distutils.command.bdist import bdist
+from setuptools.command import install_scripts
+
 
 if 'egg' not in bdist.format_commands:
     bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
diff --git a/awx/lib/site-packages/setuptools/command/alias.py b/awx/lib/site-packages/setuptools/command/alias.py
index 52384e1a28..452a9244ea 100644
--- a/awx/lib/site-packages/setuptools/command/alias.py
+++ b/awx/lib/site-packages/setuptools/command/alias.py
@@ -1,27 +1,26 @@
-import distutils, os
-from setuptools import Command
-from distutils.util import convert_path
-from distutils import log
-from distutils.errors import *
+from distutils.errors import DistutilsOptionError
+
 from setuptools.command.setopt import edit_config, option_base, config_file
 
+
 def shquote(arg):
     """Quote an argument for later parsing by shlex.split()"""
     for c in '"', "'", "\\", "#":
-        if c in arg: return repr(arg)
+        if c in arg:
+            return repr(arg)
     if arg.split() != [arg]:
         return repr(arg)
-    return arg        
+    return arg
 
 
 class alias(option_base):
     """Define a shortcut that invokes one or more commands"""
-    
+
     description = "define a shortcut to invoke one or more commands"
     command_consumes_arguments = True
 
     user_options = [
-        ('remove',   'r', 'remove (unset) the alias'), 
+        ('remove', 'r', 'remove (unset) the alias'),
     ] + option_base.user_options
 
     boolean_options = option_base.boolean_options + ['remove']
@@ -49,7 +48,7 @@ class alias(option_base):
                 print("setup.py alias", format_alias(alias, aliases))
             return
 
-        elif len(self.args)==1:
+        elif len(self.args) == 1:
             alias, = self.args
             if self.remove:
                 command = None
@@ -61,9 +60,9 @@ class alias(option_base):
                 return
         else:
             alias = self.args[0]
-            command = ' '.join(map(shquote,self.args[1:]))
+            command = ' '.join(map(shquote, self.args[1:]))
 
-        edit_config(self.filename, {'aliases': {alias:command}}, self.dry_run)
+        edit_config(self.filename, {'aliases': {alias: command}}, self.dry_run)
 
 
 def format_alias(name, aliases):
@@ -76,7 +75,4 @@ def format_alias(name, aliases):
         source = ''
     else:
         source = '--filename=%r' % source
-    return source+name+' '+command
-            
-
-
+    return source + name + ' ' + command
diff --git a/awx/lib/site-packages/setuptools/command/bdist_egg.py b/awx/lib/site-packages/setuptools/command/bdist_egg.py
index c577615824..34fdeec21f 100644
--- a/awx/lib/site-packages/setuptools/command/bdist_egg.py
+++ b/awx/lib/site-packages/setuptools/command/bdist_egg.py
@@ -3,26 +3,33 @@
 Build .egg distributions"""
 
 # This module should be kept compatible with Python 2.3
-import sys, os, marshal
-from setuptools import Command
+from distutils.errors import DistutilsSetupError
 from distutils.dir_util import remove_tree, mkpath
+from distutils import log
+from types import CodeType
+import sys
+import os
+import marshal
+import textwrap
+
+from pkg_resources import get_build_platform, Distribution, ensure_directory
+from pkg_resources import EntryPoint
+from setuptools.compat import basestring
+from setuptools.extension import Library
+from setuptools import Command
+
 try:
     # Python 2.7 or >=3.2
     from sysconfig import get_path, get_python_version
+
     def _get_purelib():
         return get_path("purelib")
 except ImportError:
     from distutils.sysconfig import get_python_lib, get_python_version
+
     def _get_purelib():
         return get_python_lib(False)
 
-from distutils import log
-from distutils.errors import DistutilsSetupError
-from pkg_resources import get_build_platform, Distribution, ensure_directory
-from pkg_resources import EntryPoint
-from types import CodeType
-from setuptools.compat import basestring, next
-from setuptools.extension import Library
 
 def strip_module(filename):
     if '.' in filename:
@@ -31,66 +38,45 @@ def strip_module(filename):
         filename = filename[:-6]
     return filename
 
-def write_stub(resource, pyfile):
-    f = open(pyfile,'w')
-    f.write('\n'.join([
-        "def __bootstrap__():",
-        "   global __bootstrap__, __loader__, __file__",
-        "   import sys, pkg_resources, imp",
-        "   __file__ = pkg_resources.resource_filename(__name__,%r)"
-            % resource,
-        "   __loader__ = None; del __bootstrap__, __loader__",
-        "   imp.load_dynamic(__name__,__file__)",
-        "__bootstrap__()",
-        "" # terminal \n
-    ]))
-    f.close()
 
-# stub __init__.py for packages distributed without one
-NS_PKG_STUB = '__import__("pkg_resources").declare_namespace(__name__)'
+def write_stub(resource, pyfile):
+    _stub_template = textwrap.dedent("""
+        def __bootstrap__():
+            global __bootstrap__, __loader__, __file__
+            import sys, pkg_resources, imp
+            __file__ = pkg_resources.resource_filename(__name__, %r)
+            __loader__ = None; del __bootstrap__, __loader__
+            imp.load_dynamic(__name__,__file__)
+        __bootstrap__()
+        """).lstrip()
+    with open(pyfile, 'w') as f:
+        f.write(_stub_template % resource)
+
 
 class bdist_egg(Command):
-
     description = "create an \"egg\" distribution"
 
     user_options = [
         ('bdist-dir=', 'b',
-            "temporary directory for creating the distribution"),
-        ('plat-name=', 'p',
-                     "platform name to embed in generated filenames "
-                     "(default: %s)" % get_build_platform()),
+         "temporary directory for creating the distribution"),
+        ('plat-name=', 'p', "platform name to embed in generated filenames "
+                            "(default: %s)" % get_build_platform()),
         ('exclude-source-files', None,
-                     "remove all .py files from the generated egg"),
+         "remove all .py files from the generated egg"),
         ('keep-temp', 'k',
-                     "keep the pseudo-installation tree around after " +
-                     "creating the distribution archive"),
+         "keep the pseudo-installation tree around after " +
+         "creating the distribution archive"),
         ('dist-dir=', 'd',
-                     "directory to put final built distributions in"),
+         "directory to put final built distributions in"),
         ('skip-build', None,
-                     "skip rebuilding everything (for testing/debugging)"),
+         "skip rebuilding everything (for testing/debugging)"),
     ]
 
     boolean_options = [
         'keep-temp', 'skip-build', 'exclude-source-files'
     ]
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    def initialize_options (self):
+    def initialize_options(self):
         self.bdist_dir = None
         self.plat_name = None
         self.keep_temp = 0
@@ -99,7 +85,6 @@ class bdist_egg(Command):
         self.egg_output = None
         self.exclude_source_files = None
 
-
     def finalize_options(self):
         ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
         self.egg_info = ei_cmd.egg_info
@@ -111,7 +96,7 @@ class bdist_egg(Command):
         if self.plat_name is None:
             self.plat_name = get_build_platform()
 
-        self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
+        self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
 
         if self.egg_output is None:
 
@@ -122,32 +107,25 @@ class bdist_egg(Command):
                 self.distribution.has_ext_modules() and self.plat_name
             ).egg_name()
 
-            self.egg_output = os.path.join(self.dist_dir, basename+'.egg')
-
-
-
-
-
-
-
+            self.egg_output = os.path.join(self.dist_dir, basename + '.egg')
 
     def do_install_data(self):
         # Hack for packages that install data to install's --install-lib
         self.get_finalized_command('install').install_lib = self.bdist_dir
 
         site_packages = os.path.normcase(os.path.realpath(_get_purelib()))
-        old, self.distribution.data_files = self.distribution.data_files,[]
+        old, self.distribution.data_files = self.distribution.data_files, []
 
         for item in old:
-            if isinstance(item,tuple) and len(item)==2:
+            if isinstance(item, tuple) and len(item) == 2:
                 if os.path.isabs(item[0]):
                     realpath = os.path.realpath(item[0])
                     normalized = os.path.normcase(realpath)
-                    if normalized==site_packages or normalized.startswith(
-                        site_packages+os.sep
+                    if normalized == site_packages or normalized.startswith(
+                        site_packages + os.sep
                     ):
-                        item = realpath[len(site_packages)+1:], item[1]
-                    # XXX else: raise ???
+                        item = realpath[len(site_packages) + 1:], item[1]
+                        # XXX else: raise ???
             self.distribution.data_files.append(item)
 
         try:
@@ -156,22 +134,19 @@ class bdist_egg(Command):
         finally:
             self.distribution.data_files = old
 
-
     def get_outputs(self):
         return [self.egg_output]
 
-
-    def call_command(self,cmdname,**kw):
+    def call_command(self, cmdname, **kw):
         """Invoke reinitialized command `cmdname` with keyword args"""
         for dirname in INSTALL_DIRECTORY_ATTRS:
-            kw.setdefault(dirname,self.bdist_dir)
-        kw.setdefault('skip_build',self.skip_build)
+            kw.setdefault(dirname, self.bdist_dir)
+        kw.setdefault('skip_build', self.skip_build)
         kw.setdefault('dry_run', self.dry_run)
         cmd = self.reinitialize_command(cmdname, **kw)
         self.run_command(cmdname)
         return cmd
 
-
     def run(self):
         # Generate metadata first
         self.run_command("egg_info")
@@ -179,7 +154,8 @@ class bdist_egg(Command):
         # pull their data path from the install_lib command.
         log.info("installing library code to %s" % self.bdist_dir)
         instcmd = self.get_finalized_command('install')
-        old_root = instcmd.root; instcmd.root = None
+        old_root = instcmd.root
+        instcmd.root = None
         if self.distribution.has_c_libraries() and not self.skip_build:
             self.run_command('build_clib')
         cmd = self.call_command('install_lib', warn_dir=0)
@@ -188,17 +164,17 @@ class bdist_egg(Command):
         all_outputs, ext_outputs = self.get_ext_outputs()
         self.stubs = []
         to_compile = []
-        for (p,ext_name) in enumerate(ext_outputs):
-            filename,ext = os.path.splitext(ext_name)
-            pyfile = os.path.join(self.bdist_dir, strip_module(filename)+'.py')
+        for (p, ext_name) in enumerate(ext_outputs):
+            filename, ext = os.path.splitext(ext_name)
+            pyfile = os.path.join(self.bdist_dir, strip_module(filename) +
+                                  '.py')
             self.stubs.append(pyfile)
             log.info("creating stub loader for %s" % ext_name)
             if not self.dry_run:
                 write_stub(os.path.basename(ext_name), pyfile)
             to_compile.append(pyfile)
-            ext_outputs[p] = ext_name.replace(os.sep,'/')
+            ext_outputs[p] = ext_name.replace(os.sep, '/')
 
-        to_compile.extend(self.make_init_files())
         if to_compile:
             cmd.byte_compile(to_compile)
         if self.distribution.data_files:
@@ -206,12 +182,13 @@ class bdist_egg(Command):
 
         # Make the EGG-INFO directory
         archive_root = self.bdist_dir
-        egg_info = os.path.join(archive_root,'EGG-INFO')
+        egg_info = os.path.join(archive_root, 'EGG-INFO')
         self.mkpath(egg_info)
         if self.distribution.scripts:
             script_dir = os.path.join(egg_info, 'scripts')
             log.info("installing scripts to %s" % script_dir)
-            self.call_command('install_scripts',install_dir=script_dir,no_ep=1)
+            self.call_command('install_scripts', install_dir=script_dir,
+                              no_ep=1)
 
         self.copy_metadata_to(egg_info)
         native_libs = os.path.join(egg_info, "native_libs.txt")
@@ -229,10 +206,10 @@ class bdist_egg(Command):
                 os.unlink(native_libs)
 
         write_safety_flag(
-            os.path.join(archive_root,'EGG-INFO'), self.zip_safe()
+            os.path.join(archive_root, 'EGG-INFO'), self.zip_safe()
         )
 
-        if os.path.exists(os.path.join(self.egg_info,'depends.txt')):
+        if os.path.exists(os.path.join(self.egg_info, 'depends.txt')):
             log.warn(
                 "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
                 "Use the install_requires/extras_require setup() args instead."
@@ -243,61 +220,33 @@ class bdist_egg(Command):
 
         # Make the archive
         make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
-                          dry_run=self.dry_run, mode=self.gen_header())
+                     dry_run=self.dry_run, mode=self.gen_header())
         if not self.keep_temp:
             remove_tree(self.bdist_dir, dry_run=self.dry_run)
 
         # Add to 'Distribution.dist_files' so that the "upload" command works
-        getattr(self.distribution,'dist_files',[]).append(
-            ('bdist_egg',get_python_version(),self.egg_output))
-
-
-
+        getattr(self.distribution, 'dist_files', []).append(
+            ('bdist_egg', get_python_version(), self.egg_output))
 
     def zap_pyfiles(self):
         log.info("Removing .py files from temporary directory")
-        for base,dirs,files in walk_egg(self.bdist_dir):
+        for base, dirs, files in walk_egg(self.bdist_dir):
             for name in files:
                 if name.endswith('.py'):
-                    path = os.path.join(base,name)
+                    path = os.path.join(base, name)
                     log.debug("Deleting %s", path)
                     os.unlink(path)
 
     def zip_safe(self):
-        safe = getattr(self.distribution,'zip_safe',None)
+        safe = getattr(self.distribution, 'zip_safe', None)
         if safe is not None:
             return safe
         log.warn("zip_safe flag not set; analyzing archive contents...")
         return analyze_egg(self.bdist_dir, self.stubs)
 
-    def make_init_files(self):
-        """Create missing package __init__ files"""
-        init_files = []
-        for base,dirs,files in walk_egg(self.bdist_dir):
-            if base==self.bdist_dir:
-                # don't put an __init__ in the root
-                continue
-            for name in files:
-                if name.endswith('.py'):
-                    if '__init__.py' not in files:
-                        pkg = base[len(self.bdist_dir)+1:].replace(os.sep,'.')
-                        if self.distribution.has_contents_for(pkg):
-                            log.warn("Creating missing __init__.py for %s",pkg)
-                            filename = os.path.join(base,'__init__.py')
-                            if not self.dry_run:
-                                f = open(filename,'w'); f.write(NS_PKG_STUB)
-                                f.close()
-                            init_files.append(filename)
-                    break
-            else:
-                # not a package, don't traverse to subdirectories
-                dirs[:] = []
-
-        return init_files
-
     def gen_header(self):
         epm = EntryPoint.parse_map(self.distribution.entry_points or '')
-        ep = epm.get('setuptools.installation',{}).get('eggsecutable')
+        ep = epm.get('setuptools.installation', {}).get('eggsecutable')
         if ep is None:
             return 'w'  # not an eggsecutable, do it the usual way.
 
@@ -325,7 +274,6 @@ class bdist_egg(Command):
             '  echo Please rename it back to %(basename)s and try again.\n'
             '  exec false\n'
             'fi\n'
-
         ) % locals()
 
         if not self.dry_run:
@@ -335,13 +283,12 @@ class bdist_egg(Command):
             f.close()
         return 'a'
 
-
     def copy_metadata_to(self, target_dir):
         "Copy metadata (egg info) to the target_dir"
         # normalize the path (so that a forward-slash in egg_info will
         # match using startswith below)
         norm_egg_info = os.path.normpath(self.egg_info)
-        prefix = os.path.join(norm_egg_info,'')
+        prefix = os.path.join(norm_egg_info, '')
         for path in self.ei_cmd.filelist.files:
             if path.startswith(prefix):
                 target = os.path.join(target_dir, path[len(prefix):])
@@ -354,23 +301,24 @@ class bdist_egg(Command):
         all_outputs = []
         ext_outputs = []
 
-        paths = {self.bdist_dir:''}
+        paths = {self.bdist_dir: ''}
         for base, dirs, files in os.walk(self.bdist_dir):
             for filename in files:
                 if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
-                    all_outputs.append(paths[base]+filename)
+                    all_outputs.append(paths[base] + filename)
             for filename in dirs:
-                paths[os.path.join(base,filename)] = paths[base]+filename+'/'
+                paths[os.path.join(base, filename)] = (paths[base] +
+                                                       filename + '/')
 
         if self.distribution.has_ext_modules():
             build_cmd = self.get_finalized_command('build_ext')
             for ext in build_cmd.extensions:
-                if isinstance(ext,Library):
+                if isinstance(ext, Library):
                     continue
                 fullname = build_cmd.get_ext_fullname(ext.name)
                 filename = build_cmd.get_ext_filename(fullname)
                 if not os.path.basename(filename).startswith('dl-'):
-                    if os.path.exists(os.path.join(self.bdist_dir,filename)):
+                    if os.path.exists(os.path.join(self.bdist_dir, filename)):
                         ext_outputs.append(filename)
 
         return all_outputs, ext_outputs
@@ -379,24 +327,24 @@ class bdist_egg(Command):
 NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())
 
 
-
-
 def walk_egg(egg_dir):
     """Walk an unpacked egg's contents, skipping the metadata directory"""
     walker = os.walk(egg_dir)
-    base,dirs,files = next(walker)
+    base, dirs, files = next(walker)
     if 'EGG-INFO' in dirs:
         dirs.remove('EGG-INFO')
-    yield base,dirs,files
+    yield base, dirs, files
     for bdf in walker:
         yield bdf
 
+
 def analyze_egg(egg_dir, stubs):
     # check for existing flag in EGG-INFO
-    for flag,fn in safety_flags.items():
-        if os.path.exists(os.path.join(egg_dir,'EGG-INFO',fn)):
+    for flag, fn in safety_flags.items():
+        if os.path.exists(os.path.join(egg_dir, 'EGG-INFO', fn)):
             return flag
-    if not can_scan(): return False
+    if not can_scan():
+        return False
     safe = True
     for base, dirs, files in walk_egg(egg_dir):
         for name in files:
@@ -407,35 +355,42 @@ def analyze_egg(egg_dir, stubs):
                 safe = scan_module(egg_dir, base, name, stubs) and safe
     return safe
 
+
 def write_safety_flag(egg_dir, safe):
     # Write or remove zip safety flag file(s)
-    for flag,fn in safety_flags.items():
+    for flag, fn in safety_flags.items():
         fn = os.path.join(egg_dir, fn)
         if os.path.exists(fn):
             if safe is None or bool(safe) != flag:
                 os.unlink(fn)
-        elif safe is not None and bool(safe)==flag:
-            f=open(fn,'wt'); f.write('\n'); f.close()
+        elif safe is not None and bool(safe) == flag:
+            f = open(fn, 'wt')
+            f.write('\n')
+            f.close()
+
 
 safety_flags = {
     True: 'zip-safe',
     False: 'not-zip-safe',
 }
 
+
 def scan_module(egg_dir, base, name, stubs):
     """Check whether module possibly uses unsafe-for-zipfile stuff"""
 
-    filename = os.path.join(base,name)
+    filename = os.path.join(base, name)
     if filename[:-1] in stubs:
-        return True     # Extension module
-    pkg = base[len(egg_dir)+1:].replace(os.sep,'.')
-    module = pkg+(pkg and '.' or '')+os.path.splitext(name)[0]
+        return True  # Extension module
+    pkg = base[len(egg_dir) + 1:].replace(os.sep, '.')
+    module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0]
     if sys.version_info < (3, 3):
-        skip = 8   # skip magic & date
+        skip = 8  # skip magic & date
     else:
         skip = 12  # skip magic & date & file size
-    f = open(filename,'rb'); f.read(skip)
-    code = marshal.load(f); f.close()
+    f = open(filename, 'rb')
+    f.read(skip)
+    code = marshal.load(f)
+    f.close()
     safe = True
     symbols = dict.fromkeys(iter_symbols(code))
     for bad in ['__file__', '__path__']:
@@ -452,21 +407,24 @@ def scan_module(egg_dir, base, name, stubs):
                 log.warn("%s: module MAY be using inspect.%s", module, bad)
                 safe = False
     if '__name__' in symbols and '__main__' in symbols and '.' not in module:
-        if sys.version[:3]=="2.4":  # -m works w/zipfiles in 2.5
+        if sys.version[:3] == "2.4":  # -m works w/zipfiles in 2.5
             log.warn("%s: top-level module may be 'python -m' script", module)
             safe = False
     return safe
 
+
 def iter_symbols(code):
     """Yield names and strings used by `code` and its nested code objects"""
-    for name in code.co_names: yield name
+    for name in code.co_names:
+        yield name
     for const in code.co_consts:
-        if isinstance(const,basestring):
+        if isinstance(const, basestring):
             yield const
-        elif isinstance(const,CodeType):
+        elif isinstance(const, CodeType):
             for name in iter_symbols(const):
                 yield name
 
+
 def can_scan():
     if not sys.platform.startswith('java') and sys.platform != 'cli':
         # CPython, PyPy, etc.
@@ -475,39 +433,6 @@ def can_scan():
     log.warn("Please ask the author to include a 'zip_safe'"
              " setting (either True or False) in the package's setup.py")
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
 # Attribute names of options for commands that might need to be convinced to
 # install to the egg build directory
 
@@ -515,9 +440,9 @@ INSTALL_DIRECTORY_ATTRS = [
     'install_lib', 'install_dir', 'install_data', 'install_base'
 ]
 
+
 def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
-    mode='w'
-):
+                 mode='w'):
     """Create a zip file from all the files under 'base_dir'.  The output
     zip file will be named 'base_dir' + ".zip".  Uses either the "zipfile"
     Python module (if available) or the InfoZIP "zip" utility (if installed
@@ -525,6 +450,7 @@ def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
     raises DistutilsExecError.  Returns the name of the output zip file.
     """
     import zipfile
+
     mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
     log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
 
@@ -532,13 +458,14 @@ def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
         for name in names:
             path = os.path.normpath(os.path.join(dirname, name))
             if os.path.isfile(path):
-                p = path[len(base_dir)+1:]
+                p = path[len(base_dir) + 1:]
                 if not dry_run:
                     z.write(path, p)
                 log.debug("adding '%s'" % p)
 
     if compress is None:
-        compress = (sys.version>="2.4") # avoid 2.3 zipimport bug when 64 bits
+        # avoid 2.3 zipimport bug when 64 bits
+        compress = (sys.version >= "2.4")
 
     compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)]
     if not dry_run:
@@ -550,4 +477,3 @@ def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
         for dirname, dirs, files in os.walk(base_dir):
             visit(None, dirname, files)
     return zip_filename
-#
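
The scan_module() changes above tidy setuptools' zip-safety heuristic: byte-compile each module, skip the .pyc header, unmarshal the code object, and flag symbols such as __file__ or __path__ that usually defeat running from a zipfile. A minimal standalone sketch of that idea, assuming CPython 3.7+ (16-byte .pyc header) and using illustrative helper names rather than setuptools API:

import marshal
import py_compile
import types

UNSAFE_NAMES = {'__file__', '__path__'}  # names that usually defeat zip import

def iter_symbols(code):
    """Yield names and string constants from a code object, recursively."""
    for name in code.co_names:
        yield name
    for const in code.co_consts:
        if isinstance(const, str):
            yield const
        elif isinstance(const, types.CodeType):
            yield from iter_symbols(const)

def looks_zip_safe(source_path):
    """Heuristic only: True if no obviously unsafe symbol is referenced."""
    pyc_path = py_compile.compile(source_path, doraise=True)
    with open(pyc_path, 'rb') as f:
        f.read(16)             # skip magic, flags, and source metadata (3.7+)
        code = marshal.load(f)
    return UNSAFE_NAMES.isdisjoint(iter_symbols(code))

# e.g. looks_zip_safe('mypkg/util.py') returns False if util.py reads __file__
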
diff --git a/awx/lib/site-packages/setuptools/command/bdist_rpm.py b/awx/lib/site-packages/setuptools/command/bdist_rpm.py
index 8c48da3559..70730927ec 100644
--- a/awx/lib/site-packages/setuptools/command/bdist_rpm.py
+++ b/awx/lib/site-packages/setuptools/command/bdist_rpm.py
@@ -1,51 +1,30 @@
-# This is just a kludge so that bdist_rpm doesn't guess wrong about the
-# distribution name and version, if the egg_info command is going to alter
-# them, another kludge to allow you to build old-style non-egg RPMs, and
-# finally, a kludge to track .rpm files for uploading when run on Python <2.5.
+import distutils.command.bdist_rpm as orig
 
-from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm
-import sys, os
 
-class bdist_rpm(_bdist_rpm):
+class bdist_rpm(orig.bdist_rpm):
+    """
+    Override the default bdist_rpm behavior to do the following:
 
-    def initialize_options(self):
-        _bdist_rpm.initialize_options(self)
-        self.no_egg = None
-
-    if sys.version<"2.5":
-        # Track for uploading any .rpm file(s) moved to self.dist_dir
-        def move_file(self, src, dst, level=1):
-            _bdist_rpm.move_file(self, src, dst, level)
-            if dst==self.dist_dir and src.endswith('.rpm'):
-                getattr(self.distribution,'dist_files',[]).append(
-                    ('bdist_rpm',
-                    src.endswith('.src.rpm') and 'any' or sys.version[:3],
-                     os.path.join(dst, os.path.basename(src)))
-                )
+    1. Run egg_info to ensure the name and version are properly calculated.
+    2. Always run 'install' using --single-version-externally-managed to
+       disable eggs in RPM distributions.
+    3. Replace dash with underscore in the version numbers for better RPM
+       compatibility.
+    """
 
     def run(self):
-        self.run_command('egg_info')    # ensure distro name is up-to-date
-        _bdist_rpm.run(self)
-
-
-
-
-
-
-
-
-
-
-
+        # ensure distro name is up-to-date
+        self.run_command('egg_info')
 
+        orig.bdist_rpm.run(self)
 
     def _make_spec_file(self):
         version = self.distribution.get_version()
-        rpmversion = version.replace('-','_')
-        spec = _bdist_rpm._make_spec_file(self)
-        line23 = '%define version '+version
-        line24 = '%define version '+rpmversion
-        spec  = [
+        rpmversion = version.replace('-', '_')
+        spec = orig.bdist_rpm._make_spec_file(self)
+        line23 = '%define version ' + version
+        line24 = '%define version ' + rpmversion
+        spec = [
             line.replace(
                 "Source0: %{name}-%{version}.tar",
                 "Source0: %{name}-%{unmangled_version}.tar"
@@ -55,28 +34,10 @@ class bdist_rpm(_bdist_rpm):
             ).replace(
                 "%setup",
                 "%setup -n %{name}-%{unmangled_version}"
-            ).replace(line23,line24)
+            ).replace(line23, line24)
             for line in spec
         ]
-        spec.insert(spec.index(line24)+1, "%define unmangled_version "+version)
+        insert_loc = spec.index(line24) + 1
+        unmangled_version = "%define unmangled_version " + version
+        spec.insert(insert_loc, unmangled_version)
         return spec
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
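
The _make_spec_file() override above mangles the version for RPM, which does not allow dashes in the Version tag, while preserving the original string as %{unmangled_version} for Source0 and %setup. A simplified sketch of that rewrite (not the setuptools implementation, and omitting the install-line tweaks) might look like this:

def mangle_spec(spec_lines, version):
    rpmversion = version.replace('-', '_')            # RPM rejects '-' here
    plain = '%define version ' + version
    mangled = '%define version ' + rpmversion
    out = [
        line.replace('Source0: %{name}-%{version}.tar',
                     'Source0: %{name}-%{unmangled_version}.tar')
            .replace('%setup', '%setup -n %{name}-%{unmangled_version}')
            .replace(plain, mangled)
        for line in spec_lines
    ]
    # keep the original string available to the macros rewritten above
    out.insert(out.index(mangled) + 1, '%define unmangled_version ' + version)
    return out

spec = ['%define version 1.0-rc1', 'Source0: %{name}-%{version}.tar', '%setup']
mangle_spec(spec, '1.0-rc1')
# -> ['%define version 1.0_rc1', '%define unmangled_version 1.0-rc1',
#     'Source0: %{name}-%{unmangled_version}.tar',
#     '%setup -n %{name}-%{unmangled_version}']
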
diff --git a/awx/lib/site-packages/setuptools/command/bdist_wininst.py b/awx/lib/site-packages/setuptools/command/bdist_wininst.py
index e8521f834c..073de97b46 100644
--- a/awx/lib/site-packages/setuptools/command/bdist_wininst.py
+++ b/awx/lib/site-packages/setuptools/command/bdist_wininst.py
@@ -1,82 +1,21 @@
-from distutils.command.bdist_wininst import bdist_wininst as _bdist_wininst
-import os, sys
+import distutils.command.bdist_wininst as orig
 
-class bdist_wininst(_bdist_wininst):
-    _good_upload = _bad_upload = None
 
-    def create_exe(self, arcname, fullname, bitmap=None):
-        _bdist_wininst.create_exe(self, arcname, fullname, bitmap)
-        installer_name = self.get_installer_filename(fullname) 
-        if self.target_version:
-            pyversion = self.target_version
-            # fix 2.5+ bdist_wininst ignoring --target-version spec
-            self._bad_upload = ('bdist_wininst', 'any', installer_name)
-        else:
-            pyversion = 'any'
-        self._good_upload = ('bdist_wininst', pyversion, installer_name)
-        
-    def _fix_upload_names(self):
-        good, bad = self._good_upload, self._bad_upload
-        dist_files = getattr(self.distribution, 'dist_files', [])
-        if bad in dist_files:
-            dist_files.remove(bad)
-        if good not in dist_files:
-            dist_files.append(good)
-
-    def reinitialize_command (self, command, reinit_subcommands=0):
+class bdist_wininst(orig.bdist_wininst):
+    def reinitialize_command(self, command, reinit_subcommands=0):
+        """
+        Supplement reinitialize_command to work around
+        http://bugs.python.org/issue20819
+        """
         cmd = self.distribution.reinitialize_command(
             command, reinit_subcommands)
         if command in ('install', 'install_lib'):
-            cmd.install_lib = None  # work around distutils bug
+            cmd.install_lib = None
         return cmd
 
     def run(self):
         self._is_running = True
         try:
-            _bdist_wininst.run(self)
-            self._fix_upload_names()
+            orig.bdist_wininst.run(self)
         finally:
             self._is_running = False
-
-
-    if not hasattr(_bdist_wininst, 'get_installer_filename'):
-        def get_installer_filename(self, fullname):
-            # Factored out to allow overriding in subclasses
-            if self.target_version:
-                # if we create an installer for a specific python version,
-                # it's better to include this in the name
-                installer_name = os.path.join(self.dist_dir,
-                                              "%s.win32-py%s.exe" %
-                                               (fullname, self.target_version))
-            else:
-                installer_name = os.path.join(self.dist_dir,
-                                              "%s.win32.exe" % fullname)
-            return installer_name
-    # get_installer_filename()
-    
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/awx/lib/site-packages/setuptools/command/build_ext.py b/awx/lib/site-packages/setuptools/command/build_ext.py
index 50a039ce50..e4b2c593f3 100644
--- a/awx/lib/site-packages/setuptools/command/build_ext.py
+++ b/awx/lib/site-packages/setuptools/command/build_ext.py
@@ -1,25 +1,30 @@
 from distutils.command.build_ext import build_ext as _du_build_ext
+from distutils.file_util import copy_file
+from distutils.ccompiler import new_compiler
+from distutils.sysconfig import customize_compiler
+from distutils.errors import DistutilsError
+from distutils import log
+import os
+import sys
+import itertools
+
+from setuptools.extension import Library
+
 try:
     # Attempt to use Pyrex for building extensions, if available
     from Pyrex.Distutils.build_ext import build_ext as _build_ext
 except ImportError:
     _build_ext = _du_build_ext
 
-import os, sys
-from distutils.file_util import copy_file
-from setuptools.extension import Library
-from distutils.ccompiler import new_compiler
-from distutils.sysconfig import customize_compiler
 try:
     # Python 2.7 or >=3.2
     from sysconfig import _CONFIG_VARS
 except ImportError:
     from distutils.sysconfig import get_config_var
+
     get_config_var("LDSHARED")  # make sure _config_vars is initialized
     del get_config_var
     from distutils.sysconfig import _config_vars as _CONFIG_VARS
-from distutils import log
-from distutils.errors import *
 
 have_rtld = False
 use_stubs = False
@@ -29,20 +34,13 @@ if sys.platform == "darwin":
     use_stubs = True
 elif os.name != 'nt':
     try:
-        from dl import RTLD_NOW
-        have_rtld = True
-        use_stubs = True
+        import dl
+        use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW')
     except ImportError:
         pass
 
-def if_dl(s):
-    if have_rtld:
-        return s
-    return ''
-
-
-
 
+if_dl = lambda s: s if have_rtld else ''
 
 
 class build_ext(_build_ext):
@@ -62,8 +60,9 @@ class build_ext(_build_ext):
             modpath = fullname.split('.')
             package = '.'.join(modpath[:-1])
             package_dir = build_py.get_package_dir(package)
-            dest_filename = os.path.join(package_dir,os.path.basename(filename))
-            src_filename = os.path.join(self.build_lib,filename)
+            dest_filename = os.path.join(package_dir,
+                                         os.path.basename(filename))
+            src_filename = os.path.join(self.build_lib, filename)
 
             # Always copy, even if source is older than destination, to ensure
             # that the right extensions for the current Python/platform are
@@ -75,8 +74,8 @@ class build_ext(_build_ext):
             if ext._needs_stub:
                 self.write_stub(package_dir or os.curdir, ext, True)
 
-
-    if _build_ext is not _du_build_ext and not hasattr(_build_ext,'pyrex_sources'):
+    if _build_ext is not _du_build_ext and not hasattr(_build_ext,
+                                                       'pyrex_sources'):
         # Workaround for problems using some Pyrex versions w/SWIG and/or 2.4
         def swig_sources(self, sources, *otherargs):
             # first do any Pyrex processing
@@ -84,18 +83,16 @@ class build_ext(_build_ext):
             # Then do any actual SWIG stuff on the remainder
             return _du_build_ext.swig_sources(self, sources, *otherargs)
 
-
-
     def get_ext_filename(self, fullname):
-        filename = _build_ext.get_ext_filename(self,fullname)
+        filename = _build_ext.get_ext_filename(self, fullname)
         if fullname in self.ext_map:
             ext = self.ext_map[fullname]
-            if isinstance(ext,Library):
+            if isinstance(ext, Library):
                 fn, ext = os.path.splitext(filename)
-                return self.shlib_compiler.library_filename(fn,libtype)
+                return self.shlib_compiler.library_filename(fn, libtype)
             elif use_stubs and ext._links_to_dynamic:
-                d,fn = os.path.split(filename)
-                return os.path.join(d,'dl-'+fn)
+                d, fn = os.path.split(filename)
+                return os.path.join(d, 'dl-' + fn)
         return filename
 
     def initialize_options(self):
@@ -109,7 +106,7 @@ class build_ext(_build_ext):
         self.extensions = self.extensions or []
         self.check_extensions_list(self.extensions)
         self.shlibs = [ext for ext in self.extensions
-                        if isinstance(ext,Library)]
+                       if isinstance(ext, Library)]
         if self.shlibs:
             self.setup_shlib_compiler()
         for ext in self.extensions:
@@ -122,11 +119,12 @@ class build_ext(_build_ext):
             # XXX what to do with conflicts?
             self.ext_map[fullname.split('.')[-1]] = ext
 
-            ltd = ext._links_to_dynamic = \
-                self.shlibs and self.links_to_dynamic(ext) or False
-            ext._needs_stub = ltd and use_stubs and not isinstance(ext,Library)
+            ltd = self.shlibs and self.links_to_dynamic(ext) or False
+            ns = ltd and use_stubs and not isinstance(ext, Library)
+            ext._links_to_dynamic = ltd
+            ext._needs_stub = ns
             filename = ext._file_name = self.get_ext_filename(fullname)
-            libdir = os.path.dirname(os.path.join(self.build_lib,filename))
+            libdir = os.path.dirname(os.path.join(self.build_lib, filename))
             if ltd and libdir not in ext.library_dirs:
                 ext.library_dirs.append(libdir)
             if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
@@ -140,7 +138,8 @@ class build_ext(_build_ext):
             tmp = _CONFIG_VARS.copy()
             try:
                 # XXX Help!  I don't have any idea whether these are right...
-                _CONFIG_VARS['LDSHARED'] = "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup"
+                _CONFIG_VARS['LDSHARED'] = (
+                    "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup")
                 _CONFIG_VARS['CCSHARED'] = " -dynamiclib"
                 _CONFIG_VARS['SO'] = ".dylib"
                 customize_compiler(compiler)
@@ -154,7 +153,7 @@ class build_ext(_build_ext):
             compiler.set_include_dirs(self.include_dirs)
         if self.define is not None:
             # 'define' option is a list of (name,value) tuples
-            for (name,value) in self.define:
+            for (name, value) in self.define:
                 compiler.define_macro(name, value)
         if self.undef is not None:
             for macro in self.undef:
@@ -171,23 +170,20 @@ class build_ext(_build_ext):
         # hack so distutils' build_extension() builds a library instead
         compiler.link_shared_object = link_shared_object.__get__(compiler)
 
-
-
     def get_export_symbols(self, ext):
-        if isinstance(ext,Library):
+        if isinstance(ext, Library):
             return ext.export_symbols
-        return _build_ext.get_export_symbols(self,ext)
+        return _build_ext.get_export_symbols(self, ext)
 
     def build_extension(self, ext):
         _compiler = self.compiler
         try:
-            if isinstance(ext,Library):
+            if isinstance(ext, Library):
                 self.compiler = self.shlib_compiler
-            _build_ext.build_extension(self,ext)
+            _build_ext.build_extension(self, ext)
             if ext._needs_stub:
-                self.write_stub(
-                    self.get_finalized_command('build_py').build_lib, ext
-                )
+                cmd = self.get_finalized_command('build_py').build_lib
+                self.write_stub(cmd, ext)
         finally:
             self.compiler = _compiler
 
@@ -197,54 +193,66 @@ class build_ext(_build_ext):
         # XXX as dynamic, and not just using a locally-found version or a
         # XXX static-compiled version
         libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
-        pkg = '.'.join(ext._full_name.split('.')[:-1]+[''])
-        for libname in ext.libraries:
-            if pkg+libname in libnames: return True
-        return False
+        pkg = '.'.join(ext._full_name.split('.')[:-1] + [''])
+        return any(pkg + libname in libnames for libname in ext.libraries)
 
     def get_outputs(self):
-        outputs = _build_ext.get_outputs(self)
-        optimize = self.get_finalized_command('build_py').optimize
-        for ext in self.extensions:
-            if ext._needs_stub:
-                base = os.path.join(self.build_lib, *ext._full_name.split('.'))
-                outputs.append(base+'.py')
-                outputs.append(base+'.pyc')
-                if optimize:
-                    outputs.append(base+'.pyo')
-        return outputs
+        return _build_ext.get_outputs(self) + self.__get_stubs_outputs()
+
+    def __get_stubs_outputs(self):
+        # assemble the base name for each extension that needs a stub
+        ns_ext_bases = (
+            os.path.join(self.build_lib, *ext._full_name.split('.'))
+            for ext in self.extensions
+            if ext._needs_stub
+        )
+        # pair each base with the extension
+        pairs = itertools.product(ns_ext_bases, self.__get_output_extensions())
+        return list(base + fnext for base, fnext in pairs)
+
+    def __get_output_extensions(self):
+        yield '.py'
+        yield '.pyc'
+        if self.get_finalized_command('build_py').optimize:
+            yield '.pyo'
 
     def write_stub(self, output_dir, ext, compile=False):
-        log.info("writing stub loader for %s to %s",ext._full_name, output_dir)
-        stub_file = os.path.join(output_dir, *ext._full_name.split('.'))+'.py'
+        log.info("writing stub loader for %s to %s", ext._full_name,
+                 output_dir)
+        stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) +
+                     '.py')
         if compile and os.path.exists(stub_file):
-            raise DistutilsError(stub_file+" already exists! Please delete.")
+            raise DistutilsError(stub_file + " already exists! Please delete.")
         if not self.dry_run:
-            f = open(stub_file,'w')
-            f.write('\n'.join([
-                "def __bootstrap__():",
-                "   global __bootstrap__, __file__, __loader__",
-                "   import sys, os, pkg_resources, imp"+if_dl(", dl"),
-                "   __file__ = pkg_resources.resource_filename(__name__,%r)"
-                   % os.path.basename(ext._file_name),
-                "   del __bootstrap__",
-                "   if '__loader__' in globals():",
-                "       del __loader__",
-                if_dl("   old_flags = sys.getdlopenflags()"),
-                "   old_dir = os.getcwd()",
-                "   try:",
-                "     os.chdir(os.path.dirname(__file__))",
-                if_dl("     sys.setdlopenflags(dl.RTLD_NOW)"),
-                "     imp.load_dynamic(__name__,__file__)",
-                "   finally:",
-                if_dl("     sys.setdlopenflags(old_flags)"),
-                "     os.chdir(old_dir)",
-                "__bootstrap__()",
-                "" # terminal \n
-            ]))
+            f = open(stub_file, 'w')
+            f.write(
+                '\n'.join([
+                    "def __bootstrap__():",
+                    "   global __bootstrap__, __file__, __loader__",
+                    "   import sys, os, pkg_resources, imp" + if_dl(", dl"),
+                    "   __file__ = pkg_resources.resource_filename"
+                    "(__name__,%r)"
+                    % os.path.basename(ext._file_name),
+                    "   del __bootstrap__",
+                    "   if '__loader__' in globals():",
+                    "       del __loader__",
+                    if_dl("   old_flags = sys.getdlopenflags()"),
+                    "   old_dir = os.getcwd()",
+                    "   try:",
+                    "     os.chdir(os.path.dirname(__file__))",
+                    if_dl("     sys.setdlopenflags(dl.RTLD_NOW)"),
+                    "     imp.load_dynamic(__name__,__file__)",
+                    "   finally:",
+                    if_dl("     sys.setdlopenflags(old_flags)"),
+                    "     os.chdir(old_dir)",
+                    "__bootstrap__()",
+                    ""  # terminal \n
+                ])
+            )
             f.close()
         if compile:
             from distutils.util import byte_compile
+
             byte_compile([stub_file], optimize=0,
                          force=True, dry_run=self.dry_run)
             optimize = self.get_finalized_command('install_lib').optimize
@@ -255,14 +263,15 @@ class build_ext(_build_ext):
                 os.unlink(stub_file)
 
 
-if use_stubs or os.name=='nt':
+if use_stubs or os.name == 'nt':
     # Build shared libraries
     #
-    def link_shared_object(self, objects, output_libname, output_dir=None,
-        libraries=None, library_dirs=None, runtime_library_dirs=None,
-        export_symbols=None, debug=0, extra_preargs=None,
-        extra_postargs=None, build_temp=None, target_lang=None
-    ):  self.link(
+    def link_shared_object(
+            self, objects, output_libname, output_dir=None, libraries=None,
+            library_dirs=None, runtime_library_dirs=None, export_symbols=None,
+            debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
+            target_lang=None):
+        self.link(
             self.SHARED_LIBRARY, objects, output_libname,
             output_dir, libraries, library_dirs, runtime_library_dirs,
             export_symbols, debug, extra_preargs, extra_postargs,
@@ -272,19 +281,19 @@ else:
     # Build static libraries everywhere else
     libtype = 'static'
 
-    def link_shared_object(self, objects, output_libname, output_dir=None,
-        libraries=None, library_dirs=None, runtime_library_dirs=None,
-        export_symbols=None, debug=0, extra_preargs=None,
-        extra_postargs=None, build_temp=None, target_lang=None
-    ):
+    def link_shared_object(
+            self, objects, output_libname, output_dir=None, libraries=None,
+            library_dirs=None, runtime_library_dirs=None, export_symbols=None,
+            debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
+            target_lang=None):
         # XXX we need to either disallow these attrs on Library instances,
-        #     or warn/abort here if set, or something...
-        #libraries=None, library_dirs=None, runtime_library_dirs=None,
-        #export_symbols=None, extra_preargs=None, extra_postargs=None,
-        #build_temp=None
+        # or warn/abort here if set, or something...
+        # libraries=None, library_dirs=None, runtime_library_dirs=None,
+        # export_symbols=None, extra_preargs=None, extra_postargs=None,
+        # build_temp=None
 
-        assert output_dir is None   # distutils build_ext doesn't pass this
-        output_dir,filename = os.path.split(output_libname)
+        assert output_dir is None  # distutils build_ext doesn't pass this
+        output_dir, filename = os.path.split(output_libname)
         basename, ext = os.path.splitext(filename)
         if self.library_filename("x").startswith('lib'):
             # strip 'lib' prefix; this is kludgy if some platform uses
@@ -294,5 +303,3 @@ else:
         self.create_static_lib(
             objects, basename, output_dir, debug, target_lang
         )
-
-
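
The get_outputs()/__get_stubs_outputs() rewrite above replaces a nested loop with itertools.product over stub base names and byte-compiled suffixes. A minimal, self-contained sketch of that pairing (function name is illustrative, POSIX separators shown in the example):

import itertools
import os

def stub_outputs(build_lib, ext_full_names, optimize=False):
    """Return the stub files an egg build would emit for these extensions."""
    bases = (os.path.join(build_lib, *name.split('.'))
             for name in ext_full_names)
    suffixes = ['.py', '.pyc'] + (['.pyo'] if optimize else [])
    return [base + suffix
            for base, suffix in itertools.product(bases, suffixes)]

stub_outputs('build/lib', ['pkg._speedups'], optimize=True)
# -> ['build/lib/pkg/_speedups.py', 'build/lib/pkg/_speedups.pyc',
#     'build/lib/pkg/_speedups.pyo']
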
diff --git a/awx/lib/site-packages/setuptools/command/build_py.py b/awx/lib/site-packages/setuptools/command/build_py.py
index 090b44d265..98080694ad 100644
--- a/awx/lib/site-packages/setuptools/command/build_py.py
+++ b/awx/lib/site-packages/setuptools/command/build_py.py
@@ -1,10 +1,10 @@
+from glob import glob
+from distutils.util import convert_path
+import distutils.command.build_py as orig
 import os
 import sys
 import fnmatch
 import textwrap
-from distutils.command.build_py import build_py as _build_py
-from distutils.util import convert_path
-from glob import glob
 
 try:
     from setuptools.lib2to3_ex import Mixin2to3
@@ -13,7 +13,8 @@ except ImportError:
         def run_2to3(self, files, doctests=True):
             "do nothing"
 
-class build_py(_build_py, Mixin2to3):
+
+class build_py(orig.build_py, Mixin2to3):
     """Enhanced 'build_py' command that includes data files with packages
 
     The data files are specified via a 'package_data' argument to 'setup()'.
@@ -22,11 +23,14 @@ class build_py(_build_py, Mixin2to3):
     Also, this version of the 'build_py' command allows you to specify both
     'py_modules' and 'packages' in the same setup operation.
     """
+
     def finalize_options(self):
-        _build_py.finalize_options(self)
+        orig.build_py.finalize_options(self)
         self.package_data = self.distribution.package_data
-        self.exclude_package_data = self.distribution.exclude_package_data or {}
-        if 'data_files' in self.__dict__: del self.__dict__['data_files']
+        self.exclude_package_data = (self.distribution.exclude_package_data or
+                                     {})
+        if 'data_files' in self.__dict__:
+            del self.__dict__['data_files']
         self.__updated_files = []
         self.__doctests_2to3 = []
 
@@ -48,16 +52,17 @@ class build_py(_build_py, Mixin2to3):
 
         # Only compile actual .py files, using our base class' idea of what our
         # output files are.
-        self.byte_compile(_build_py.get_outputs(self, include_bytecode=0))
+        self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0))
 
     def __getattr__(self, attr):
-        if attr=='data_files':  # lazily compute data files
+        if attr == 'data_files':  # lazily compute data files
             self.data_files = files = self._get_data_files()
             return files
-        return _build_py.__getattr__(self,attr)
+        return orig.build_py.__getattr__(self, attr)
 
     def build_module(self, module, module_file, package):
-        outfile, copied = _build_py.build_module(self, module, module_file, package)
+        outfile, copied = orig.build_py.build_module(self, module, module_file,
+                                                     package)
         if copied:
             self.__updated_files.append(outfile)
         return outfile, copied
@@ -74,12 +79,12 @@ class build_py(_build_py, Mixin2to3):
             build_dir = os.path.join(*([self.build_lib] + package.split('.')))
 
             # Length of path to strip from found files
-            plen = len(src_dir)+1
+            plen = len(src_dir) + 1
 
             # Strip directory from globbed filenames
             filenames = [
                 file[plen:] for file in self.find_data_files(package, src_dir)
-                ]
+            ]
             data.append((package, src_dir, build_dir, filenames))
         return data
 
@@ -102,7 +107,8 @@ class build_py(_build_py, Mixin2to3):
                 srcfile = os.path.join(src_dir, filename)
                 outf, copied = self.copy_file(srcfile, target)
                 srcfile = os.path.abspath(srcfile)
-                if copied and srcfile in self.distribution.convert_2to3_doctests:
+                if (copied and
+                        srcfile in self.distribution.convert_2to3_doctests):
                     self.__doctests_2to3.append(outf)
 
     def analyze_manifest(self):
@@ -117,21 +123,22 @@ class build_py(_build_py, Mixin2to3):
         self.run_command('egg_info')
         ei_cmd = self.get_finalized_command('egg_info')
         for path in ei_cmd.filelist.files:
-            d,f = os.path.split(assert_relative(path))
+            d, f = os.path.split(assert_relative(path))
             prev = None
             oldf = f
-            while d and d!=prev and d not in src_dirs:
+            while d and d != prev and d not in src_dirs:
                 prev = d
                 d, df = os.path.split(d)
                 f = os.path.join(df, f)
             if d in src_dirs:
-                if path.endswith('.py') and f==oldf:
-                    continue    # it's a module, not data
-                mf.setdefault(src_dirs[d],[]).append(path)
+                if path.endswith('.py') and f == oldf:
+                    continue  # it's a module, not data
+                mf.setdefault(src_dirs[d], []).append(path)
 
-    def get_data_files(self): pass  # kludge 2.4 for lazy computation
+    def get_data_files(self):
+        pass  # kludge 2.4 for lazy computation
 
-    if sys.version<"2.4":    # Python 2.4 already has this code
+    if sys.version < "2.4":  # Python 2.4 already has this code
         def get_outputs(self, include_bytecode=1):
             """Return complete list of files copied to the build directory
 
@@ -140,11 +147,11 @@ class build_py(_build_py, Mixin2to3):
             needed for the 'install_lib' command to do its job properly, and to
             generate a correct installation manifest.)
             """
-            return _build_py.get_outputs(self, include_bytecode) + [
+            return orig.build_py.get_outputs(self, include_bytecode) + [
                 os.path.join(build_dir, filename)
-                for package, src_dir, build_dir,filenames in self.data_files
+                for package, src_dir, build_dir, filenames in self.data_files
                 for filename in filenames
-                ]
+            ]
 
     def check_package(self, package, package_dir):
         """Check namespace packages' __init__ for declare_namespace"""
@@ -153,36 +160,37 @@ class build_py(_build_py, Mixin2to3):
         except KeyError:
             pass
 
-        init_py = _build_py.check_package(self, package, package_dir)
+        init_py = orig.build_py.check_package(self, package, package_dir)
         self.packages_checked[package] = init_py
 
         if not init_py or not self.distribution.namespace_packages:
             return init_py
 
         for pkg in self.distribution.namespace_packages:
-            if pkg==package or pkg.startswith(package+'.'):
+            if pkg == package or pkg.startswith(package + '.'):
                 break
         else:
             return init_py
 
-        f = open(init_py,'rbU')
+        f = open(init_py, 'rbU')
         if 'declare_namespace'.encode() not in f.read():
-            from distutils import log
-            log.warn(
-                "WARNING: %s is a namespace package, but its __init__.py does\n"
-                "not declare_namespace(); setuptools 0.7 will REQUIRE this!\n"
-                '(See the setuptools manual under "Namespace Packages" for '
-                "details.)\n", package
+            from distutils.errors import DistutilsError
+
+            raise DistutilsError(
+                "Namespace package problem: %s is a namespace package, but "
+                "its\n__init__.py does not call declare_namespace()! Please "
+                'fix it.\n(See the setuptools manual under '
+                '"Namespace Packages" for details.)\n"' % (package,)
             )
         f.close()
         return init_py
 
     def initialize_options(self):
-        self.packages_checked={}
-        _build_py.initialize_options(self)
+        self.packages_checked = {}
+        orig.build_py.initialize_options(self)
 
     def get_package_dir(self, package):
-        res = _build_py.get_package_dir(self, package)
+        res = orig.build_py.get_package_dir(self, package)
         if self.distribution.src_root is not None:
             return os.path.join(self.distribution.src_root, res)
         return res
@@ -202,7 +210,7 @@ class build_py(_build_py, Mixin2to3):
         seen = {}
         return [
             f for f in files if f not in bad
-                and f not in seen and seen.setdefault(f,1)  # ditch dupes
+            and f not in seen and seen.setdefault(f, 1)  # ditch dupes
         ]
 
 
@@ -210,6 +218,7 @@ def assert_relative(path):
     if not os.path.isabs(path):
         return path
     from distutils.errors import DistutilsSetupError
+
     msg = textwrap.dedent("""
         Error: setup script specifies an absolute path:
 
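
build_py above keeps data_files lazy: finalize_options() discards any cached value and __getattr__ computes it on first access, after which the instance attribute shadows the hook. A tiny sketch of that lazy-attribute pattern, with illustrative names only:

class LazyDataFiles:
    """Compute an expensive attribute once, on first access."""

    def _get_data_files(self):
        # stands in for build_py's real scan of package_data globs
        return [('pkg', 'src/pkg', 'build/lib/pkg', ['defaults.cfg'])]

    def __getattr__(self, attr):
        # only called when normal lookup fails, i.e. before the first access
        if attr == 'data_files':
            self.data_files = files = self._get_data_files()
            return files
        raise AttributeError(attr)

cmd = LazyDataFiles()
cmd.data_files                    # computed here and cached on the instance
del cmd.__dict__['data_files']    # invalidate, as finalize_options() does
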
diff --git a/awx/lib/site-packages/setuptools/command/develop.py b/awx/lib/site-packages/setuptools/command/develop.py
index 1d500040d0..368b64fed7 100644
--- a/awx/lib/site-packages/setuptools/command/develop.py
+++ b/awx/lib/site-packages/setuptools/command/develop.py
@@ -1,9 +1,14 @@
-from setuptools.command.easy_install import easy_install
-from distutils.util import convert_path, subst_vars
-from pkg_resources import Distribution, PathMetadata, normalize_path
+from distutils.util import convert_path
 from distutils import log
 from distutils.errors import DistutilsError, DistutilsOptionError
-import os, sys, setuptools, glob
+import os
+import glob
+
+from pkg_resources import Distribution, PathMetadata, normalize_path
+from setuptools.command.easy_install import easy_install
+from setuptools.compat import PY3
+import setuptools
+
 
 class develop(easy_install):
     """Set up package for development"""
@@ -32,59 +37,56 @@ class develop(easy_install):
         self.egg_path = None
         easy_install.initialize_options(self)
         self.setup_path = None
-        self.always_copy_from = '.'   # always copy eggs installed in curdir
-
-
+        self.always_copy_from = '.'  # always copy eggs installed in curdir
 
     def finalize_options(self):
         ei = self.get_finalized_command("egg_info")
         if ei.broken_egg_info:
-            raise DistutilsError(
-            "Please rename %r to %r before using 'develop'"
-            % (ei.egg_info, ei.broken_egg_info)
-            )
+            template = "Please rename %r to %r before using 'develop'"
+            args = ei.egg_info, ei.broken_egg_info
+            raise DistutilsError(template % args)
         self.args = [ei.egg_name]
 
-
-
-
         easy_install.finalize_options(self)
         self.expand_basedirs()
         self.expand_dirs()
         # pick up setup-dir .egg files only: no .egg-info
         self.package_index.scan(glob.glob('*.egg'))
 
-        self.egg_link = os.path.join(self.install_dir, ei.egg_name+'.egg-link')
+        self.egg_link = os.path.join(self.install_dir, ei.egg_name +
+                                     '.egg-link')
         self.egg_base = ei.egg_base
         if self.egg_path is None:
             self.egg_path = os.path.abspath(ei.egg_base)
 
         target = normalize_path(self.egg_base)
-        if normalize_path(os.path.join(self.install_dir, self.egg_path)) != target:
+        egg_path = normalize_path(os.path.join(self.install_dir,
+                                               self.egg_path))
+        if egg_path != target:
             raise DistutilsOptionError(
                 "--egg-path must be a relative path from the install"
-                " directory to "+target
-        )
+                " directory to " + target
+            )
 
         # Make a distribution for the package's source
         self.dist = Distribution(
             target,
             PathMetadata(target, os.path.abspath(ei.egg_info)),
-            project_name = ei.egg_name
+            project_name=ei.egg_name
         )
 
-        p = self.egg_base.replace(os.sep,'/')
-        if p!= os.curdir:
-            p = '../' * (p.count('/')+1)
+        p = self.egg_base.replace(os.sep, '/')
+        if p != os.curdir:
+            p = '../' * (p.count('/') + 1)
         self.setup_path = p
         p = normalize_path(os.path.join(self.install_dir, self.egg_path, p))
-        if  p != normalize_path(os.curdir):
+        if p != normalize_path(os.curdir):
             raise DistutilsOptionError(
                 "Can't get a consistent path to setup script from"
                 " installation directory", p, normalize_path(os.curdir))
 
     def install_for_development(self):
-        if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False):
+        if PY3 and getattr(self.distribution, 'use_2to3', False):
             # If we run 2to3 we can not do this inplace:
 
             # Ensure metadata is up-to-date
@@ -99,12 +101,13 @@ class develop(easy_install):
 
             self.reinitialize_command('build_ext', inplace=0)
             self.run_command('build_ext')
-            
+
             # Fixup egg-link and easy-install.pth
             ei_cmd = self.get_finalized_command("egg_info")
             self.egg_path = build_path
             self.dist.location = build_path
-            self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info)    # XXX
+            # XXX
+            self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info)
         else:
             # Without 2to3 inplace works fine:
             self.run_command('egg_info')
@@ -112,7 +115,7 @@ class develop(easy_install):
             # Build extensions in-place
             self.reinitialize_command('build_ext', inplace=1)
             self.run_command('build_ext')
-        
+
         self.install_site_py()  # ensure that target dir is site-safe
         if setuptools.bootstrap_install_from:
             self.easy_install(setuptools.bootstrap_install_from)
@@ -121,21 +124,21 @@ class develop(easy_install):
         # create an .egg-link in the installation dir, pointing to our egg
         log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
         if not self.dry_run:
-            f = open(self.egg_link,"w")
+            f = open(self.egg_link, "w")
             f.write(self.egg_path + "\n" + self.setup_path)
             f.close()
         # postprocess the installed distro, fixing up .pth, installing scripts,
         # and handling requirements
         self.process_distribution(None, self.dist, not self.no_deps)
 
-
     def uninstall_link(self):
         if os.path.exists(self.egg_link):
             log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
             egg_link_file = open(self.egg_link)
             contents = [line.rstrip() for line in egg_link_file]
             egg_link_file.close()
-            if contents not in ([self.egg_path], [self.egg_path, self.setup_path]):
+            if contents not in ([self.egg_path],
+                                [self.egg_path, self.setup_path]):
                 log.warn("Link points to %s: uninstall aborted", contents)
                 return
             if not self.dry_run:
@@ -149,7 +152,7 @@ class develop(easy_install):
     def install_egg_scripts(self, dist):
         if dist is not self.dist:
             # Installing a dependency, so fall back to normal behavior
-            return easy_install.install_egg_scripts(self,dist)
+            return easy_install.install_egg_scripts(self, dist)
 
         # create wrapper scripts in the script dir, pointing to dist.scripts
 
@@ -160,8 +163,7 @@ class develop(easy_install):
         for script_name in self.distribution.scripts or []:
             script_path = os.path.abspath(convert_path(script_name))
             script_name = os.path.basename(script_path)
-            f = open(script_path,'rU')
+            f = open(script_path, 'rU')
             script_text = f.read()
             f.close()
             self.install_script(dist, script_name, script_text, script_path)
-
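
develop above records the editable install as a two-line *.egg-link file (egg path, then relative setup path) and refuses to remove one whose contents it did not write. A hedged sketch of that read/write contract, with illustrative function names rather than setuptools API:

import os

def write_egg_link(egg_link, egg_path, setup_path):
    """Record an editable install: egg path on line one, setup path on line two."""
    with open(egg_link, 'w') as f:
        f.write(egg_path + '\n' + setup_path)

def remove_egg_link(egg_link, egg_path, setup_path):
    """Remove the link only if it still matches what develop wrote."""
    with open(egg_link) as f:
        contents = [line.rstrip() for line in f]
    if contents not in ([egg_path], [egg_path, setup_path]):
        return False             # someone else's link: abort, as develop does
    os.unlink(egg_link)
    return True
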
diff --git a/awx/lib/site-packages/setuptools/command/easy_install.py b/awx/lib/site-packages/setuptools/command/easy_install.py
index 08ebf3e589..e057b508e9 100644
--- a/awx/lib/site-packages/setuptools/command/easy_install.py
+++ b/awx/lib/site-packages/setuptools/command/easy_install.py
@@ -12,6 +12,14 @@ __ https://pythonhosted.org/setuptools/easy_install.html
 
 """
 
+from glob import glob
+from distutils.util import get_platform
+from distutils.util import convert_path, subst_vars
+from distutils.errors import DistutilsArgError, DistutilsOptionError, \
+    DistutilsError, DistutilsPlatformError
+from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS
+from distutils import log, dir_util
+from distutils.command.build_scripts import first_line_re
 import sys
 import os
 import zipimport
@@ -26,44 +34,43 @@ import textwrap
 import warnings
 import site
 import struct
-from glob import glob
-from distutils import log, dir_util
+import contextlib
+import subprocess
+import shlex
+import io
 
-import pkg_resources
-from setuptools import Command, _dont_write_bytecode
+from setuptools import Command
 from setuptools.sandbox import run_setup
 from setuptools.py31compat import get_path, get_config_vars
-
-from distutils.util import get_platform
-from distutils.util import convert_path, subst_vars
-from distutils.errors import DistutilsArgError, DistutilsOptionError, \
-    DistutilsError, DistutilsPlatformError
-from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS
 from setuptools.command import setopt
 from setuptools.archive_util import unpack_archive
 from setuptools.package_index import PackageIndex
 from setuptools.package_index import URL_SCHEME
 from setuptools.command import bdist_egg, egg_info
 from setuptools.compat import (iteritems, maxsize, basestring, unicode,
-                               reraise)
+                               reraise, PY2, PY3)
 from pkg_resources import (
     yield_lines, normalize_path, resource_string, ensure_directory,
     get_distribution, find_distributions, Environment, Requirement,
     Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound,
     VersionConflict, DEVELOP_DIST,
 )
+import pkg_resources
+
+# Turn on PEP440Warnings
+warnings.filterwarnings("default", category=pkg_resources.PEP440Warning)
 
-sys_executable = os.environ.get('__VENV_LAUNCHER__',
-    os.path.normpath(sys.executable))
 
 __all__ = [
     'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg',
     'main', 'get_exe_prefixes',
 ]
 
+
 def is_64bit():
     return struct.calcsize("P") == 8
 
+
 def samefile(p1, p2):
     both_exist = os.path.exists(p1) and os.path.exists(p2)
     use_samefile = hasattr(os.path, 'samefile') and both_exist
@@ -73,9 +80,11 @@ def samefile(p1, p2):
     norm_p2 = os.path.normpath(os.path.normcase(p2))
     return norm_p1 == norm_p2
 
-if sys.version_info <= (3,):
+
+if PY2:
     def _to_ascii(s):
         return s
+
     def isascii(s):
         try:
             unicode(s, 'ascii')
@@ -85,6 +94,7 @@ if sys.version_info <= (3,):
 else:
     def _to_ascii(s):
         return s.encode('ascii')
+
     def isascii(s):
         try:
             s.encode('ascii')
@@ -92,6 +102,7 @@ else:
         except UnicodeError:
             return False
 
+
 class easy_install(Command):
     """Manage a download/build/install process"""
     description = "Find/get/install Python packages"
@@ -109,22 +120,22 @@ class easy_install(Command):
         ("index-url=", "i", "base URL of Python Package Index"),
         ("find-links=", "f", "additional URL(s) to search for packages"),
         ("build-directory=", "b",
-            "download/extract/build in DIR; keep the results"),
+         "download/extract/build in DIR; keep the results"),
         ('optimize=', 'O',
-            "also compile with optimization: -O1 for \"python -O\", "
-            "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
+         "also compile with optimization: -O1 for \"python -O\", "
+         "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
         ('record=', None,
-            "filename in which to record list of installed files"),
+         "filename in which to record list of installed files"),
         ('always-unzip', 'Z', "don't install as a zipfile, no matter what"),
-        ('site-dirs=','S',"list of directories where .pth files work"),
+        ('site-dirs=', 'S', "list of directories where .pth files work"),
         ('editable', 'e', "Install specified packages in editable form"),
         ('no-deps', 'N', "don't install dependencies"),
         ('allow-hosts=', 'H', "pattern(s) that hostnames must match"),
         ('local-snapshots-ok', 'l',
-            "allow building eggs from local checkouts"),
+         "allow building eggs from local checkouts"),
         ('version', None, "print version information and exit"),
         ('no-find-links', None,
-            "Don't load find-links defined in packages being installed")
+         "Don't load find-links defined in packages being installed")
     ]
     boolean_options = [
         'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy',
@@ -158,10 +169,10 @@ class easy_install(Command):
         self.editable = self.no_deps = self.allow_hosts = None
         self.root = self.prefix = self.no_report = None
         self.version = None
-        self.install_purelib = None     # for pure module distributions
-        self.install_platlib = None     # non-pure (dists w/ extensions)
-        self.install_headers = None     # for C/C++ headers
-        self.install_lib = None         # set to either purelib or platlib
+        self.install_purelib = None  # for pure module distributions
+        self.install_platlib = None  # non-pure (dists w/ extensions)
+        self.install_headers = None  # for C/C++ headers
+        self.install_lib = None  # set to either purelib or platlib
         self.install_scripts = None
         self.install_data = None
         self.install_base = None
@@ -196,7 +207,8 @@ class easy_install(Command):
             if os.path.exists(filename) or os.path.islink(filename):
                 log.info("Deleting %s", filename)
                 if not self.dry_run:
-                    if os.path.isdir(filename) and not os.path.islink(filename):
+                    if (os.path.isdir(filename) and
+                            not os.path.islink(filename)):
                         rmtree(filename)
                     else:
                         os.unlink(filename)
@@ -229,7 +241,7 @@ class easy_install(Command):
             self.config_vars['usersite'] = self.install_usersite
 
         # fix the install_dir if "--user" was used
-        #XXX: duplicate of the code in the setup command
+        # XXX: duplicate of the code in the setup command
         if self.user and site.ENABLE_USER_SITE:
             self.create_home_path()
             if self.install_userbase is None:
@@ -244,7 +256,8 @@ class easy_install(Command):
         self.expand_basedirs()
         self.expand_dirs()
 
-        self._expand('install_dir','script_dir','build_directory','site_dirs')
+        self._expand('install_dir', 'script_dir', 'build_directory',
+                     'site_dirs')
         # If a non-default installation directory was specified, default the
         # script directory to match it.
         if self.script_dir is None:
@@ -256,12 +269,12 @@ class easy_install(Command):
         # Let install_dir get set by install_lib command, which in turn
         # gets its info from the install command, and takes into account
         # --prefix and --home and all that other crud.
-        self.set_undefined_options('install_lib',
-            ('install_dir','install_dir')
+        self.set_undefined_options(
+            'install_lib', ('install_dir', 'install_dir')
         )
         # Likewise, set default script_dir from 'install_scripts.install_dir'
-        self.set_undefined_options('install_scripts',
-            ('install_dir', 'script_dir')
+        self.set_undefined_options(
+            'install_scripts', ('install_dir', 'script_dir')
         )
 
         if self.user and self.install_purelib:
@@ -275,18 +288,20 @@ class easy_install(Command):
         self.all_site_dirs = get_site_dirs()
         if self.site_dirs is not None:
             site_dirs = [
-                os.path.expanduser(s.strip()) for s in self.site_dirs.split(',')
+                os.path.expanduser(s.strip()) for s in
+                self.site_dirs.split(',')
             ]
             for d in site_dirs:
                 if not os.path.isdir(d):
                     log.warn("%s (in --site-dirs) does not exist", d)
                 elif normalize_path(d) not in normpath:
                     raise DistutilsOptionError(
-                        d+" (in --site-dirs) is not on sys.path"
+                        d + " (in --site-dirs) is not on sys.path"
                     )
                 else:
                     self.all_site_dirs.append(normalize_path(d))
-        if not self.editable: self.check_site_dir()
+        if not self.editable:
+            self.check_site_dir()
         self.index_url = self.index_url or "https://pypi.python.org/simple"
         self.shadow_path = self.all_site_dirs[:]
         for path_item in self.install_dir, normalize_path(self.script_dir):
@@ -299,9 +314,9 @@ class easy_install(Command):
             hosts = ['*']
         if self.package_index is None:
             self.package_index = self.create_index(
-                self.index_url, search_path = self.shadow_path, hosts=hosts,
+                self.index_url, search_path=self.shadow_path, hosts=hosts,
             )
-        self.local_index = Environment(self.shadow_path+sys.path)
+        self.local_index = Environment(self.shadow_path + sys.path)
 
         if self.find_links is not None:
             if isinstance(self.find_links, basestring):
@@ -309,14 +324,15 @@ class easy_install(Command):
         else:
             self.find_links = []
         if self.local_snapshots_ok:
-            self.package_index.scan_egg_links(self.shadow_path+sys.path)
+            self.package_index.scan_egg_links(self.shadow_path + sys.path)
         if not self.no_find_links:
             self.package_index.add_find_links(self.find_links)
-        self.set_undefined_options('install_lib', ('optimize','optimize'))
-        if not isinstance(self.optimize,int):
+        self.set_undefined_options('install_lib', ('optimize', 'optimize'))
+        if not isinstance(self.optimize, int):
             try:
                 self.optimize = int(self.optimize)
-                if not (0 <= self.optimize <= 2): raise ValueError
+                if not (0 <= self.optimize <= 2):
+                    raise ValueError
             except ValueError:
                 raise DistutilsOptionError("--optimize must be 0, 1, or 2")
 
@@ -348,7 +364,7 @@ class easy_install(Command):
         """Calls `os.path.expanduser` on install dirs."""
         self._expand_attrs(['install_purelib', 'install_platlib',
                             'install_lib', 'install_headers',
-                            'install_scripts', 'install_data',])
+                            'install_scripts', 'install_data', ])
 
     def run(self):
         if self.verbose != self.distribution.verbose:
@@ -358,11 +374,12 @@ class easy_install(Command):
                 self.easy_install(spec, not self.no_deps)
             if self.record:
                 outputs = self.outputs
-                if self.root:               # strip any package prefix
+                if self.root:  # strip any package prefix
                     root_len = len(self.root)
                     for counter in range(len(outputs)):
                         outputs[counter] = outputs[counter][root_len:]
                 from distutils import file_util
+
                 self.execute(
                     file_util.write_file, (self.record, outputs),
                     "writing list of installed files to '%s'" %
@@ -390,7 +407,7 @@ class easy_install(Command):
         """Verify that self.install_dir is .pth-capable dir, if needed"""
 
         instdir = normalize_path(self.install_dir)
-        pth_file = os.path.join(instdir,'easy-install.pth')
+        pth_file = os.path.join(instdir, 'easy-install.pth')
 
         # Is it a configured, PYTHONPATH, implicit, or explicit site dir?
         is_site_dir = instdir in self.all_site_dirs
@@ -400,13 +417,14 @@ class easy_install(Command):
             is_site_dir = self.check_pth_processing()
         else:
             # make sure we can write to target dir
-            testfile = self.pseudo_tempname()+'.write-test'
+            testfile = self.pseudo_tempname() + '.write-test'
             test_exists = os.path.exists(testfile)
             try:
-                if test_exists: os.unlink(testfile)
-                open(testfile,'w').close()
+                if test_exists:
+                    os.unlink(testfile)
+                open(testfile, 'w').close()
                 os.unlink(testfile)
-            except (OSError,IOError):
+            except (OSError, IOError):
                 self.cant_write_to_target()
 
         if not is_site_dir and not self.multi_version:
@@ -419,84 +437,94 @@ class easy_install(Command):
         else:
             self.pth_file = None
 
-        PYTHONPATH = os.environ.get('PYTHONPATH','').split(os.pathsep)
+        PYTHONPATH = os.environ.get('PYTHONPATH', '').split(os.pathsep)
         if instdir not in map(normalize_path, [_f for _f in PYTHONPATH if _f]):
             # only PYTHONPATH dirs need a site.py, so pretend it's there
             self.sitepy_installed = True
         elif self.multi_version and not os.path.exists(pth_file):
-            self.sitepy_installed = True    # don't need site.py in this case
-            self.pth_file = None            # and don't create a .pth file
+            self.sitepy_installed = True  # don't need site.py in this case
+            self.pth_file = None  # and don't create a .pth file
         self.install_dir = instdir
 
+    __cant_write_msg = textwrap.dedent("""
+        can't create or remove files in install directory
+
+        The following error occurred while trying to add or remove files in the
+        installation directory:
+
+            %s
+
+        The installation directory you specified (via --install-dir, --prefix, or
+        the distutils default setting) was:
+
+            %s
+        """).lstrip()
+
+    __not_exists_id = textwrap.dedent("""
+        This directory does not currently exist.  Please create it and try again, or
+        choose a different installation directory (using the -d or --install-dir
+        option).
+        """).lstrip()
+
+    __access_msg = textwrap.dedent("""
+        Perhaps your account does not have write access to this directory?  If the
+        installation directory is a system-owned directory, you may need to sign in
+        as the administrator or "root" account.  If you do not have administrative
+        access to this machine, you may wish to choose a different installation
+        directory, preferably one that is listed in your PYTHONPATH environment
+        variable.
+
+        For information on other options, you may wish to consult the
+        documentation at:
+
+          https://pythonhosted.org/setuptools/easy_install.html
+
+        Please make the appropriate changes for your system and try again.
+        """).lstrip()
+
     def cant_write_to_target(self):
-        template = """can't create or remove files in install directory
-
-The following error occurred while trying to add or remove files in the
-installation directory:
-
-    %s
-
-The installation directory you specified (via --install-dir, --prefix, or
-the distutils default setting) was:
-
-    %s
-"""
-        msg = template % (sys.exc_info()[1], self.install_dir,)
+        msg = self.__cant_write_msg % (sys.exc_info()[1], self.install_dir,)
 
         if not os.path.exists(self.install_dir):
-            msg += """
-This directory does not currently exist.  Please create it and try again, or
-choose a different installation directory (using the -d or --install-dir
-option).
-"""
+            msg += '\n' + self.__not_exists_id
         else:
-            msg += """
-Perhaps your account does not have write access to this directory?  If the
-installation directory is a system-owned directory, you may need to sign in
-as the administrator or "root" account.  If you do not have administrative
-access to this machine, you may wish to choose a different installation
-directory, preferably one that is listed in your PYTHONPATH environment
-variable.
-
-For information on other options, you may wish to consult the
-documentation at:
-
-  https://pythonhosted.org/setuptools/easy_install.html
-
-Please make the appropriate changes for your system and try again.
-"""
+            msg += '\n' + self.__access_msg
         raise DistutilsError(msg)
 
     def check_pth_processing(self):
         """Empirically verify whether .pth files are supported in inst. dir"""
         instdir = self.install_dir
         log.info("Checking .pth file support in %s", instdir)
-        pth_file = self.pseudo_tempname()+".pth"
-        ok_file = pth_file+'.ok'
+        pth_file = self.pseudo_tempname() + ".pth"
+        ok_file = pth_file + '.ok'
         ok_exists = os.path.exists(ok_file)
         try:
-            if ok_exists: os.unlink(ok_file)
+            if ok_exists:
+                os.unlink(ok_file)
             dirname = os.path.dirname(ok_file)
             if not os.path.exists(dirname):
                 os.makedirs(dirname)
-            f = open(pth_file,'w')
-        except (OSError,IOError):
+            f = open(pth_file, 'w')
+        except (OSError, IOError):
             self.cant_write_to_target()
         else:
             try:
-                f.write("import os; f = open(%r, 'w'); f.write('OK'); f.close()\n" % (ok_file,))
+                f.write("import os; f = open(%r, 'w'); f.write('OK'); "
+                        "f.close()\n" % (ok_file,))
                 f.close()
-                f=None
+                f = None
                 executable = sys.executable
-                if os.name=='nt':
-                    dirname,basename = os.path.split(executable)
-                    alt = os.path.join(dirname,'pythonw.exe')
-                    if basename.lower()=='python.exe' and os.path.exists(alt):
+                if os.name == 'nt':
+                    dirname, basename = os.path.split(executable)
+                    alt = os.path.join(dirname, 'pythonw.exe')
+                    if (basename.lower() == 'python.exe' and
+                            os.path.exists(alt)):
                         # use pythonw.exe to avoid opening a console window
                         executable = alt
 
                 from distutils.spawn import spawn
-                spawn([executable,'-E','-c','pass'],0)
+
+                spawn([executable, '-E', '-c', 'pass'], 0)
 
                 if os.path.exists(ok_file):
                     log.info(
@@ -525,7 +553,7 @@ Please make the appropriate changes for your system and try again.
                     continue
                 self.install_script(
                     dist, script_name,
-                    dist.get_metadata('scripts/'+script_name)
+                    dist.get_metadata('scripts/' + script_name)
                 )
         self.install_wrapper_scripts(dist)
 
@@ -533,7 +561,7 @@ Please make the appropriate changes for your system and try again.
         if os.path.isdir(path):
             for base, dirs, files in os.walk(path):
                 for filename in files:
-                    self.outputs.append(os.path.join(base,filename))
+                    self.outputs.append(os.path.join(base, filename))
         else:
             self.outputs.append(path)
 
@@ -545,7 +573,7 @@ Please make the appropriate changes for your system and try again.
                 % (spec,)
             )
 
-    def check_editable(self,spec):
+    def check_editable(self, spec):
         if not self.editable:
             return
 
@@ -558,15 +586,17 @@ Please make the appropriate changes for your system and try again.
     def easy_install(self, spec, deps=False):
         tmpdir = tempfile.mkdtemp(prefix="easy_install-")
         download = None
-        if not self.editable: self.install_site_py()
+        if not self.editable:
+            self.install_site_py()
 
         try:
-            if not isinstance(spec,Requirement):
+            if not isinstance(spec, Requirement):
                 if URL_SCHEME(spec):
                     # It's a url, download it to tmpdir and process
                     self.not_editable(spec)
                     download = self.package_index.download(spec, tmpdir)
-                    return self.install_item(None, download, tmpdir, deps, True)
+                    return self.install_item(None, download, tmpdir, deps,
+                                             True)
 
                 elif os.path.exists(spec):
                     # Existing file or directory, just process it directly
@@ -577,15 +607,15 @@ Please make the appropriate changes for your system and try again.
 
             self.check_editable(spec)
             dist = self.package_index.fetch_distribution(
-                spec, tmpdir, self.upgrade, self.editable, not self.always_copy,
-                self.local_index
+                spec, tmpdir, self.upgrade, self.editable,
+                not self.always_copy, self.local_index
             )
             if dist is None:
                 msg = "Could not find suitable distribution for %r" % spec
                 if self.always_copy:
-                    msg+=" (--always-copy skips system and development eggs)"
+                    msg += " (--always-copy skips system and development eggs)"
                 raise DistutilsError(msg)
-            elif dist.precedence==DEVELOP_DIST:
+            elif dist.precedence == DEVELOP_DIST:
                 # .egg-info dists don't need installing, just process deps
                 self.process_distribution(spec, dist, deps, "Using")
                 return dist
@@ -612,10 +642,10 @@ Please make the appropriate changes for your system and try again.
             # at this point, we know it's a local .egg, we just don't know if
             # it's already installed.
             for dist in self.local_index[spec.project_name]:
-                if dist.location==download:
+                if dist.location == download:
                     break
             else:
-                install_needed = True   # it's not in the local index
+                install_needed = True  # it's not in the local index
 
         log.info("Processing %s", os.path.basename(download))
 
@@ -644,6 +674,8 @@ Please make the appropriate changes for your system and try again.
     def process_distribution(self, requirement, dist, deps=True, *info):
         self.update_pth(dist)
         self.package_index.add(dist)
+        if dist in self.local_index[dist.key]:
+            self.local_index.remove(dist)
         self.local_index.add(dist)
         self.install_egg_scripts(dist)
         self.installed_projects[dist.key] = dist
@@ -670,17 +702,12 @@ Please make the appropriate changes for your system and try again.
             distros = WorkingSet([]).resolve(
                 [requirement], self.local_index, self.easy_install
             )
-        except DistributionNotFound:
-            e = sys.exc_info()[1]
+        except DistributionNotFound as e:
             raise DistutilsError(
                 "Could not find required distribution %s" % e.args
             )
-        except VersionConflict:
-            e = sys.exc_info()[1]
-            raise DistutilsError(
-                "Installed distribution %s conflicts with requirement %s"
-                % e.args
-            )
+        except VersionConflict as e:
+            raise DistutilsError(e.report())
         if self.always_copy or self.always_copy_from:
             # Force all the relevant distros to be copied or activated
             for dist in distros:
@@ -700,17 +727,18 @@ Please make the appropriate changes for your system and try again.
     def maybe_move(self, spec, dist_filename, setup_base):
         dst = os.path.join(self.build_directory, spec.key)
         if os.path.exists(dst):
-            msg = "%r already exists in %s; build directory %s will not be kept"
+            msg = ("%r already exists in %s; build directory %s will not be "
+                   "kept")
             log.warn(msg, spec.key, self.build_directory, setup_base)
             return setup_base
         if os.path.isdir(dist_filename):
             setup_base = dist_filename
         else:
-            if os.path.dirname(dist_filename)==setup_base:
-                os.unlink(dist_filename)   # get it out of the tmp dir
+            if os.path.dirname(dist_filename) == setup_base:
+                os.unlink(dist_filename)  # get it out of the tmp dir
             contents = os.listdir(setup_base)
-            if len(contents)==1:
-                dist_filename = os.path.join(setup_base,contents[0])
+            if len(contents) == 1:
+                dist_filename = os.path.join(setup_base, contents[0])
                 if os.path.isdir(dist_filename):
                     # if the only thing there is a directory, move it instead
                     setup_base = dist_filename
@@ -720,7 +748,7 @@ Please make the appropriate changes for your system and try again.
 
     def install_wrapper_scripts(self, dist):
         if not self.exclude_scripts:
-            for args in get_script_args(dist):
+            for args in ScriptWriter.best().get_args(dist):
                 self.write_script(*args)
 
     def install_script(self, dist, script_name, script_text, dev_path=None):
@@ -728,32 +756,31 @@ Please make the appropriate changes for your system and try again.
         spec = str(dist.as_requirement())
         is_script = is_python_script(script_text, script_name)
 
-        def get_template(filename):
-            """
-            There are a couple of template scripts in the package. This
-            function loads one of them and prepares it for use.
-
-            These templates use triple-quotes to escape variable
-            substitutions so the scripts get the 2to3 treatment when build
-            on Python 3. The templates cannot use triple-quotes naturally.
-            """
-            raw_bytes = resource_string('setuptools', template_name)
-            template_str = raw_bytes.decode('utf-8')
-            clean_template = template_str.replace('"""', '')
-            return clean_template
-
         if is_script:
-            template_name = 'script template.py'
-            if dev_path:
-                template_name = template_name.replace('.py', ' (dev).py')
-            script_text = (get_script_header(script_text) +
-                get_template(template_name) % locals())
+            script_text = (ScriptWriter.get_header(script_text) +
+                           self._load_template(dev_path) % locals())
         self.write_script(script_name, _to_ascii(script_text), 'b')
 
+    @staticmethod
+    def _load_template(dev_path):
+        """
+        There are a couple of template scripts in the package. This
+        function loads one of them and prepares it for use.
+        """
+        # See https://bitbucket.org/pypa/setuptools/issue/134 for info
+        # on script file naming and downstream issues with SVR4
+        name = 'script.tmpl'
+        if dev_path:
+            name = name.replace('.tmpl', ' (dev).tmpl')
+
+        raw_bytes = resource_string('setuptools', name)
+        return raw_bytes.decode('utf-8')
+
     def write_script(self, script_name, contents, mode="t", blockers=()):
         """Write an executable file to the scripts directory"""
-        self.delete_blockers(   # clean up old .py/.pyw w/o a script
-            [os.path.join(self.script_dir,x) for x in blockers])
+        self.delete_blockers(  # clean up old .py/.pyw w/o a script
+            [os.path.join(self.script_dir, x) for x in blockers]
+        )
         log.info("Installing %s script to %s", script_name, self.script_dir)
         target = os.path.join(self.script_dir, script_name)
         self.add_output(target)
@@ -763,10 +790,10 @@ Please make the appropriate changes for your system and try again.
             ensure_directory(target)
             if os.path.exists(target):
                 os.unlink(target)
-            f = open(target,"w"+mode)
+            f = open(target, "w" + mode)
             f.write(contents)
             f.close()
-            chmod(target, 0x1FF-mask) # 0777
+            chmod(target, 0o777 - mask)
 
     def install_eggs(self, spec, dist_filename, tmpdir):
         # .egg dirs or files are already built, so just return them
@@ -782,7 +809,7 @@ Please make the appropriate changes for your system and try again.
         elif os.path.isdir(dist_filename):
             setup_base = os.path.abspath(dist_filename)
 
-        if (setup_base.startswith(tmpdir)   # something we downloaded
+        if (setup_base.startswith(tmpdir)  # something we downloaded
                 and self.build_directory and spec is not None):
             setup_base = self.maybe_move(spec, dist_filename, setup_base)
 
@@ -793,11 +820,13 @@ Please make the appropriate changes for your system and try again.
             setups = glob(os.path.join(setup_base, '*', 'setup.py'))
             if not setups:
                 raise DistutilsError(
-                    "Couldn't find a setup script in %s" % os.path.abspath(dist_filename)
+                    "Couldn't find a setup script in %s" %
+                    os.path.abspath(dist_filename)
                 )
-            if len(setups)>1:
+            if len(setups) > 1:
                 raise DistutilsError(
-                    "Multiple setup scripts in %s" % os.path.abspath(dist_filename)
+                    "Multiple setup scripts in %s" %
+                    os.path.abspath(dist_filename)
                 )
             setup_script = setups[0]
 
@@ -810,13 +839,15 @@ Please make the appropriate changes for your system and try again.
 
     def egg_distribution(self, egg_path):
         if os.path.isdir(egg_path):
-            metadata = PathMetadata(egg_path,os.path.join(egg_path,'EGG-INFO'))
+            metadata = PathMetadata(egg_path, os.path.join(egg_path,
+                                                           'EGG-INFO'))
         else:
             metadata = EggMetadata(zipimport.zipimporter(egg_path))
-        return Distribution.from_filename(egg_path,metadata=metadata)
+        return Distribution.from_filename(egg_path, metadata=metadata)
 
     def install_egg(self, egg_path, tmpdir):
-        destination = os.path.join(self.install_dir,os.path.basename(egg_path))
+        destination = os.path.join(self.install_dir,
+                                   os.path.basename(egg_path))
         destination = os.path.abspath(destination)
         if not self.dry_run:
             ensure_directory(destination)
@@ -826,24 +857,33 @@ Please make the appropriate changes for your system and try again.
             if os.path.isdir(destination) and not os.path.islink(destination):
                 dir_util.remove_tree(destination, dry_run=self.dry_run)
             elif os.path.exists(destination):
-                self.execute(os.unlink,(destination,),"Removing "+destination)
-            uncache_zipdir(destination)
-            if os.path.isdir(egg_path):
-                if egg_path.startswith(tmpdir):
-                    f,m = shutil.move, "Moving"
+                self.execute(os.unlink, (destination,), "Removing " +
+                             destination)
+            try:
+                new_dist_is_zipped = False
+                if os.path.isdir(egg_path):
+                    if egg_path.startswith(tmpdir):
+                        f, m = shutil.move, "Moving"
+                    else:
+                        f, m = shutil.copytree, "Copying"
+                elif self.should_unzip(dist):
+                    self.mkpath(destination)
+                    f, m = self.unpack_and_compile, "Extracting"
                 else:
-                    f,m = shutil.copytree, "Copying"
-            elif self.should_unzip(dist):
-                self.mkpath(destination)
-                f,m = self.unpack_and_compile, "Extracting"
-            elif egg_path.startswith(tmpdir):
-                f,m = shutil.move, "Moving"
-            else:
-                f,m = shutil.copy2, "Copying"
-
-            self.execute(f, (egg_path, destination),
-                (m+" %s to %s") %
-                (os.path.basename(egg_path),os.path.dirname(destination)))
+                    new_dist_is_zipped = True
+                    if egg_path.startswith(tmpdir):
+                        f, m = shutil.move, "Moving"
+                    else:
+                        f, m = shutil.copy2, "Copying"
+                self.execute(f, (egg_path, destination),
+                             (m + " %s to %s") %
+                             (os.path.basename(egg_path),
+                              os.path.dirname(destination)))
+                update_dist_caches(destination,
+                                   fix_zipimporter_caches=new_dist_is_zipped)
+            except:
+                update_dist_caches(destination, fix_zipimporter_caches=False)
+                raise
 
         self.add_output(destination)
         return self.egg_distribution(destination)
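The update_dist_caches() call introduced above (its docstring appears in a later hunk) amounts to evicting stale finder and zipimporter state for the path that was just replaced. A minimal illustration of the finder-cache half, using a placeholder egg path:

    import sys

    # Hypothetical path of an egg that was just replaced on disk.
    replaced = '/tmp/example-1.0-py2.7.egg'
    # Drop any cached finder so the next import re-probes the new layout.
    sys.path_importer_cache.pop(replaced, None)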
@@ -858,30 +898,33 @@ Please make the appropriate changes for your system and try again.
         # Create a dummy distribution object until we build the real distro
         dist = Distribution(
             None,
-            project_name=cfg.get('metadata','name'),
-            version=cfg.get('metadata','version'), platform=get_platform(),
+            project_name=cfg.get('metadata', 'name'),
+            version=cfg.get('metadata', 'version'), platform=get_platform(),
         )
 
         # Convert the .exe to an unpacked egg
-        egg_path = dist.location = os.path.join(tmpdir, dist.egg_name()+'.egg')
+        egg_path = dist.location = os.path.join(tmpdir, dist.egg_name() +
+                                                '.egg')
         egg_tmp = egg_path + '.tmp'
         _egg_info = os.path.join(egg_tmp, 'EGG-INFO')
         pkg_inf = os.path.join(_egg_info, 'PKG-INFO')
-        ensure_directory(pkg_inf)   # make sure EGG-INFO dir exists
-        dist._provider = PathMetadata(egg_tmp, _egg_info)    # XXX
+        ensure_directory(pkg_inf)  # make sure EGG-INFO dir exists
+        dist._provider = PathMetadata(egg_tmp, _egg_info)  # XXX
         self.exe_to_egg(dist_filename, egg_tmp)
 
         # Write EGG-INFO/PKG-INFO
         if not os.path.exists(pkg_inf):
-            f = open(pkg_inf,'w')
+            f = open(pkg_inf, 'w')
             f.write('Metadata-Version: 1.0\n')
-            for k,v in cfg.items('metadata'):
+            for k, v in cfg.items('metadata'):
                 if k != 'target_version':
-                    f.write('%s: %s\n' % (k.replace('_','-').title(), v))
+                    f.write('%s: %s\n' % (k.replace('_', '-').title(), v))
             f.close()
-        script_dir = os.path.join(_egg_info,'scripts')
-        self.delete_blockers(   # delete entry-point scripts to avoid duping
-            [os.path.join(script_dir,args[0]) for args in get_script_args(dist)]
+        script_dir = os.path.join(_egg_info, 'scripts')
+        # delete entry-point scripts to avoid duping
+        self.delete_blockers(
+            [os.path.join(script_dir, args[0]) for args in
+             ScriptWriter.get_args(dist)]
         )
         # Build .egg file from tmpdir
         bdist_egg.make_zipfile(
@@ -897,11 +940,12 @@ Please make the appropriate changes for your system and try again.
         to_compile = []
         native_libs = []
         top_level = {}
-        def process(src,dst):
+
+        def process(src, dst):
             s = src.lower()
-            for old,new in prefixes:
+            for old, new in prefixes:
                 if s.startswith(old):
-                    src = new+src[len(old):]
+                    src = new + src[len(old):]
                     parts = src.split('/')
                     dst = os.path.join(egg_tmp, *parts)
                     dl = dst.lower()
@@ -909,97 +953,104 @@ Please make the appropriate changes for your system and try again.
                         parts[-1] = bdist_egg.strip_module(parts[-1])
                         top_level[os.path.splitext(parts[0])[0]] = 1
                         native_libs.append(src)
-                    elif dl.endswith('.py') and old!='SCRIPTS/':
+                    elif dl.endswith('.py') and old != 'SCRIPTS/':
                         top_level[os.path.splitext(parts[0])[0]] = 1
                         to_compile.append(dst)
                     return dst
             if not src.endswith('.pth'):
                 log.warn("WARNING: can't process %s", src)
             return None
+
         # extract, tracking .pyd/.dll->native_libs and .py -> to_compile
         unpack_archive(dist_filename, egg_tmp, process)
         stubs = []
         for res in native_libs:
-            if res.lower().endswith('.pyd'):    # create stubs for .pyd's
+            if res.lower().endswith('.pyd'):  # create stubs for .pyd's
                 parts = res.split('/')
                 resource = parts[-1]
-                parts[-1] = bdist_egg.strip_module(parts[-1])+'.py'
+                parts[-1] = bdist_egg.strip_module(parts[-1]) + '.py'
                 pyfile = os.path.join(egg_tmp, *parts)
                 to_compile.append(pyfile)
                 stubs.append(pyfile)
                 bdist_egg.write_stub(resource, pyfile)
-        self.byte_compile(to_compile)   # compile .py's
-        bdist_egg.write_safety_flag(os.path.join(egg_tmp,'EGG-INFO'),
+        self.byte_compile(to_compile)  # compile .py's
+        bdist_egg.write_safety_flag(
+            os.path.join(egg_tmp, 'EGG-INFO'),
             bdist_egg.analyze_egg(egg_tmp, stubs))  # write zip-safety flag
 
-        for name in 'top_level','native_libs':
+        for name in 'top_level', 'native_libs':
             if locals()[name]:
-                txt = os.path.join(egg_tmp, 'EGG-INFO', name+'.txt')
+                txt = os.path.join(egg_tmp, 'EGG-INFO', name + '.txt')
                 if not os.path.exists(txt):
-                    f = open(txt,'w')
-                    f.write('\n'.join(locals()[name])+'\n')
+                    f = open(txt, 'w')
+                    f.write('\n'.join(locals()[name]) + '\n')
                     f.close()
 
+    __mv_warning = textwrap.dedent("""
+        Because this distribution was installed --multi-version, before you can
+        import modules from this package in an application, you will need to
+        'import pkg_resources' and then use a 'require()' call similar to one of
+        these examples, in order to select the desired version:
+
+            pkg_resources.require("%(name)s")  # latest installed version
+            pkg_resources.require("%(name)s==%(version)s")  # this exact version
+            pkg_resources.require("%(name)s>=%(version)s")  # this version or higher
+        """).lstrip()
+
+    __id_warning = textwrap.dedent("""
+        Note also that the installation directory must be on sys.path at runtime for
+        this to work.  (e.g. by being the application's script directory, by being on
+        PYTHONPATH, or by being added to sys.path by your code.)
+        """)
+
     def installation_report(self, req, dist, what="Installed"):
         """Helpful installation message for display to package users"""
         msg = "\n%(what)s %(eggloc)s%(extras)s"
         if self.multi_version and not self.no_report:
-            msg += """
+            msg += '\n' + self.__mv_warning
+            if self.install_dir not in map(normalize_path, sys.path):
+                msg += '\n' + self.__id_warning
 
-Because this distribution was installed --multi-version, before you can
-import modules from this package in an application, you will need to
-'import pkg_resources' and then use a 'require()' call similar to one of
-these examples, in order to select the desired version:
-
-    pkg_resources.require("%(name)s")  # latest installed version
-    pkg_resources.require("%(name)s==%(version)s")  # this exact version
-    pkg_resources.require("%(name)s>=%(version)s")  # this version or higher
-"""
-            if self.install_dir not in map(normalize_path,sys.path):
-                msg += """
-
-Note also that the installation directory must be on sys.path at runtime for
-this to work.  (e.g. by being the application's script directory, by being on
-PYTHONPATH, or by being added to sys.path by your code.)
-"""
         eggloc = dist.location
         name = dist.project_name
         version = dist.version
-        extras = '' # TODO: self.report_extras(req, dist)
+        extras = ''  # TODO: self.report_extras(req, dist)
         return msg % locals()
 
+    __editable_msg = textwrap.dedent("""
+        Extracted editable version of %(spec)s to %(dirname)s
+
+        If it uses setuptools in its setup script, you can activate it in
+        "development" mode by going to that directory and running::
+
+            %(python)s setup.py develop
+
+        See the setuptools documentation for the "develop" command for more info.
+        """).lstrip()
+
     def report_editable(self, spec, setup_script):
         dirname = os.path.dirname(setup_script)
         python = sys.executable
-        return """\nExtracted editable version of %(spec)s to %(dirname)s
-
-If it uses setuptools in its setup script, you can activate it in
-"development" mode by going to that directory and running::
-
-    %(python)s setup.py develop
-
-See the setuptools documentation for the "develop" command for more info.
-""" % locals()
+        return '\n' + self.__editable_msg % locals()
 
     def run_setup(self, setup_script, setup_base, args):
         sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg)
         sys.modules.setdefault('distutils.command.egg_info', egg_info)
 
         args = list(args)
-        if self.verbose>2:
+        if self.verbose > 2:
             v = 'v' * (self.verbose - 1)
-            args.insert(0,'-'+v)
-        elif self.verbose<2:
-            args.insert(0,'-q')
+            args.insert(0, '-' + v)
+        elif self.verbose < 2:
+            args.insert(0, '-q')
         if self.dry_run:
-            args.insert(0,'-n')
+            args.insert(0, '-n')
         log.info(
-            "Running %s %s", setup_script[len(setup_base)+1:], ' '.join(args)
+            "Running %s %s", setup_script[len(setup_base) + 1:], ' '.join(args)
         )
         try:
             run_setup(setup_script, args)
-        except SystemExit:
-            v = sys.exc_info()[1]
+        except SystemExit as v:
             raise DistutilsError("Setup script exited with %s" % (v.args[0],))
 
     def build_and_install(self, setup_script, setup_base):
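As a quick sketch of the multi-version workflow that __mv_warning above describes (package and module names here are placeholders), the require() call has to run before anything is imported from the package:

    import pkg_resources

    # With a --multi-version install, nothing from the package is importable
    # until a version is selected explicitly.
    pkg_resources.require("ExamplePackage>=1.0")
    import examplepackage  # hypothetical top-level module of ExamplePackage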
@@ -1020,11 +1071,11 @@ See the setuptools documentation for the "develop" command for more info.
                     eggs.append(self.install_egg(dist.location, setup_base))
             if not eggs and not self.dry_run:
                 log.warn("No eggs found in %s (setup script problem?)",
-                    dist_dir)
+                         dist_dir)
             return eggs
         finally:
             rmtree(dist_dir)
-            log.set_verbosity(self.verbose) # restore our log verbosity
+            log.set_verbosity(self.verbose)  # restore our log verbosity
 
     def _set_fetcher_options(self, base):
         """
@@ -1034,7 +1085,7 @@ See the setuptools documentation for the "develop" command for more info.
         are available to that command as well.
         """
         # find the fetch options from easy_install and write them out
-        #  to the setup.cfg file.
+        # to the setup.cfg file.
         ei_opts = self.distribution.get_option_dict('easy_install').copy()
         fetch_directives = (
             'find_links', 'site_dirs', 'index_url', 'optimize',
@@ -1042,7 +1093,8 @@ See the setuptools documentation for the "develop" command for more info.
         )
         fetch_options = {}
         for key, val in ei_opts.items():
-            if key not in fetch_directives: continue
+            if key not in fetch_directives:
+                continue
             fetch_options[key.replace('_', '-')] = val[1]
         # create a settings dictionary suitable for `edit_config`
         settings = dict(easy_install=fetch_options)
@@ -1053,7 +1105,7 @@ See the setuptools documentation for the "develop" command for more info.
         if self.pth_file is None:
             return
 
-        for d in self.pth_file[dist.key]:    # drop old entries
+        for d in self.pth_file[dist.key]:  # drop old entries
             if self.multi_version or d.location != dist.location:
                 log.info("Removing %s from easy-install.pth file", d)
                 self.pth_file.remove(d)
@@ -1068,7 +1120,7 @@ See the setuptools documentation for the "develop" command for more info.
                 )
             else:
                 log.info("Adding %s to easy-install.pth file", dist)
-                self.pth_file.add(dist) # add new entry
+                self.pth_file.add(dist)  # add new entry
                 if dist.location not in self.shadow_path:
                     self.shadow_path.append(dist.location)
 
@@ -1076,19 +1128,20 @@ See the setuptools documentation for the "develop" command for more info.
 
             self.pth_file.save()
 
-            if dist.key=='setuptools':
+            if dist.key == 'setuptools':
                 # Ensure that setuptools itself never becomes unavailable!
                 # XXX should this check for latest version?
-                filename = os.path.join(self.install_dir,'setuptools.pth')
-                if os.path.islink(filename): os.unlink(filename)
+                filename = os.path.join(self.install_dir, 'setuptools.pth')
+                if os.path.islink(filename):
+                    os.unlink(filename)
                 f = open(filename, 'wt')
-                f.write(self.pth_file.make_relative(dist.location)+'\n')
+                f.write(self.pth_file.make_relative(dist.location) + '\n')
                 f.close()
 
     def unpack_progress(self, src, dst):
         # Progress filter for unpacking
         log.debug("Unpacking %s to %s", src, dst)
-        return dst     # only unpack-and-compile skips files for dry run
+        return dst  # only unpack-and-compile skips files for dry run
 
     def unpack_and_compile(self, egg_path, destination):
         to_compile = []
@@ -1099,22 +1152,23 @@ See the setuptools documentation for the "develop" command for more info.
                 to_compile.append(dst)
             elif dst.endswith('.dll') or dst.endswith('.so'):
                 to_chmod.append(dst)
-            self.unpack_progress(src,dst)
+            self.unpack_progress(src, dst)
             return not self.dry_run and dst or None
 
         unpack_archive(egg_path, destination, pf)
         self.byte_compile(to_compile)
         if not self.dry_run:
             for f in to_chmod:
-                mode = ((os.stat(f)[stat.ST_MODE]) | 0x16D) & 0xFED  # 0555, 07755
+                mode = ((os.stat(f)[stat.ST_MODE]) | 0o555) & 0o7755
                 chmod(f, mode)
 
     def byte_compile(self, to_compile):
-        if _dont_write_bytecode:
+        if sys.dont_write_bytecode:
             self.warn('byte-compiling is disabled, skipping.')
             return
 
         from distutils.util import byte_compile
+
         try:
             # try to make the byte compile messages quieter
             log.set_verbosity(self.verbose - 1)
@@ -1126,38 +1180,41 @@ See the setuptools documentation for the "develop" command for more info.
                     dry_run=self.dry_run
                 )
         finally:
-            log.set_verbosity(self.verbose)     # restore original verbosity
+            log.set_verbosity(self.verbose)  # restore original verbosity
+
+    __no_default_msg = textwrap.dedent("""
+        bad install directory or PYTHONPATH
+
+        You are attempting to install a package to a directory that is not
+        on PYTHONPATH and which Python does not read ".pth" files from.  The
+        installation directory you specified (via --install-dir, --prefix, or
+        the distutils default setting) was:
+
+            %s
+
+        and your PYTHONPATH environment variable currently contains:
+
+            %r
+
+        Here are some of your options for correcting the problem:
+
+        * You can choose a different installation directory, i.e., one that is
+          on PYTHONPATH or supports .pth files
+
+        * You can add the installation directory to the PYTHONPATH environment
+          variable.  (It must then also be on PYTHONPATH whenever you run
+          Python and want to use the package(s) you are installing.)
+
+        * You can set up the installation directory to support ".pth" files by
+          using one of the approaches described here:
+
+          https://pythonhosted.org/setuptools/easy_install.html#custom-installation-locations
+
+        Please make the appropriate changes for your system and try again.""").lstrip()
 
     def no_default_version_msg(self):
-        template = """bad install directory or PYTHONPATH
-
-You are attempting to install a package to a directory that is not
-on PYTHONPATH and which Python does not read ".pth" files from.  The
-installation directory you specified (via --install-dir, --prefix, or
-the distutils default setting) was:
-
-    %s
-
-and your PYTHONPATH environment variable currently contains:
-
-    %r
-
-Here are some of your options for correcting the problem:
-
-* You can choose a different installation directory, i.e., one that is
-  on PYTHONPATH or supports .pth files
-
-* You can add the installation directory to the PYTHONPATH environment
-  variable.  (It must then also be on PYTHONPATH whenever you run
-  Python and want to use the package(s) you are installing.)
-
-* You can set up the installation directory to support ".pth" files by
-  using one of the approaches described here:
-
-  https://pythonhosted.org/setuptools/easy_install.html#custom-installation-locations
-
-Please make the appropriate changes for your system and try again."""
-        return template % (self.install_dir, os.environ.get('PYTHONPATH',''))
+        template = self.__no_default_msg
+        return template % (self.install_dir, os.environ.get('PYTHONPATH', ''))
 
     def install_site_py(self):
         """Make sure there's a site.py in the target dir, if needed"""
@@ -1171,10 +1228,10 @@ Please make the appropriate changes for your system and try again."""
 
         if os.path.exists(sitepy):
             log.debug("Checking existing site.py in %s", self.install_dir)
-            f = open(sitepy,'rb')
+            f = open(sitepy, 'rb')
             current = f.read()
             # we want str, not bytes
-            if sys.version_info >= (3,):
+            if PY3:
                 current = current.decode()
 
             f.close()
@@ -1188,7 +1245,7 @@ Please make the appropriate changes for your system and try again."""
             log.info("Creating %s", sitepy)
             if not self.dry_run:
                 ensure_directory(sitepy)
-                f = open(sitepy,'wb')
+                f = open(sitepy, 'wb')
                 f.write(source)
                 f.close()
             self.byte_compile([sitepy])
@@ -1202,19 +1259,19 @@ Please make the appropriate changes for your system and try again."""
         home = convert_path(os.path.expanduser("~"))
         for name, path in iteritems(self.config_vars):
             if path.startswith(home) and not os.path.isdir(path):
-                self.debug_print("os.makedirs('%s', 0700)" % path)
-                os.makedirs(path, 0x1C0)    # 0700
+                self.debug_print("os.makedirs('%s', 0o700)" % path)
+                os.makedirs(path, 0o700)
 
     INSTALL_SCHEMES = dict(
-        posix = dict(
-            install_dir = '$base/lib/python$py_version_short/site-packages',
-            script_dir = '$base/bin',
+        posix=dict(
+            install_dir='$base/lib/python$py_version_short/site-packages',
+            script_dir='$base/bin',
         ),
     )
 
     DEFAULT_SCHEME = dict(
-        install_dir = '$base/Lib/site-packages',
-        script_dir = '$base/Scripts',
+        install_dir='$base/Lib/site-packages',
+        script_dir='$base/Scripts',
     )
 
     def _expand(self, *attrs):
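The INSTALL_SCHEMES / DEFAULT_SCHEME templates above are filled in by _expand() through distutils.util.subst_vars; a small sketch with made-up config values:

    from distutils.util import subst_vars

    # Illustrative values; _expand() takes these from self.config_vars.
    config_vars = {'base': '/usr/local', 'py_version_short': '2.7'}
    template = '$base/lib/python$py_version_short/site-packages'
    print(subst_vars(template, config_vars))
    # -> /usr/local/lib/python2.7/site-packages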
@@ -1224,12 +1281,13 @@ Please make the appropriate changes for your system and try again."""
             # Set default install_dir/scripts from --prefix
             config_vars = config_vars.copy()
             config_vars['base'] = self.prefix
-            scheme = self.INSTALL_SCHEMES.get(os.name,self.DEFAULT_SCHEME)
-            for attr,val in scheme.items():
-                if getattr(self,attr,None) is None:
-                    setattr(self,attr,val)
+            scheme = self.INSTALL_SCHEMES.get(os.name, self.DEFAULT_SCHEME)
+            for attr, val in scheme.items():
+                if getattr(self, attr, None) is None:
+                    setattr(self, attr, val)
 
         from distutils.util import subst_vars
+
         for attr in attrs:
             val = getattr(self, attr)
             if val is not None:
@@ -1238,6 +1296,7 @@ Please make the appropriate changes for your system and try again."""
                     val = os.path.expanduser(val)
                 setattr(self, attr, val)
 
+
 def get_site_dirs():
     # return a list of 'site' dirs
     sitedirs = [_f for _f in os.environ.get('PYTHONPATH',
@@ -1251,10 +1310,10 @@ def get_site_dirs():
                 sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
             elif os.sep == '/':
                 sitedirs.extend([os.path.join(prefix,
-                                         "lib",
-                                         "python" + sys.version[:3],
-                                         "site-packages"),
-                            os.path.join(prefix, "lib", "site-python")])
+                                              "lib",
+                                              "python" + sys.version[:3],
+                                              "site-packages"),
+                                 os.path.join(prefix, "lib", "site-python")])
             else:
                 sitedirs.extend(
                     [prefix, os.path.join(prefix, "lib", "site-packages")]
@@ -1274,7 +1333,8 @@ def get_site_dirs():
                                          'site-packages'))
     lib_paths = get_path('purelib'), get_path('platlib')
     for site_lib in lib_paths:
-        if site_lib not in sitedirs: sitedirs.append(site_lib)
+        if site_lib not in sitedirs:
+            sitedirs.append(site_lib)
 
     if site.ENABLE_USER_SITE:
         sitedirs.append(site.USER_SITE)
@@ -1305,12 +1365,12 @@ def expand_paths(inputs):
             if not name.endswith('.pth'):
                 # We only care about the .pth files
                 continue
-            if name in ('easy-install.pth','setuptools.pth'):
+            if name in ('easy-install.pth', 'setuptools.pth'):
                 # Ignore .pth files that we control
                 continue
 
             # Read the .pth file
-            f = open(os.path.join(dirname,name))
+            f = open(os.path.join(dirname, name))
             lines = list(yield_lines(f))
             f.close()
 
@@ -1330,7 +1390,7 @@ def extract_wininst_cfg(dist_filename):
 
     Returns a ConfigParser.RawConfigParser, or None
     """
-    f = open(dist_filename,'rb')
+    f = open(dist_filename, 'rb')
     try:
         endrec = zipfile._EndRecData(f)
         if endrec is None:
@@ -1339,21 +1399,23 @@ def extract_wininst_cfg(dist_filename):
         prepended = (endrec[9] - endrec[5]) - endrec[6]
         if prepended < 12:  # no wininst data here
             return None
-        f.seek(prepended-12)
+        f.seek(prepended - 12)
 
         from setuptools.compat import StringIO, ConfigParser
         import struct
-        tag, cfglen, bmlen = struct.unpack("<iii",f.read(12))
+
+        tag, cfglen, bmlen = struct.unpack("<iii", f.read(12))
         if tag not in (0x1234567A, 0x1234567B):
-            return None # not a valid tag
+            return None  # not a valid tag
 
-        f.seek(prepended-(12+cfglen))
-        cfg = ConfigParser.RawConfigParser({'version':'','target_version':''})
+        f.seek(prepended - (12 + cfglen))
+        cfg = ConfigParser.RawConfigParser(
+            {'version': '', 'target_version': ''})
         try:
             part = f.read(cfglen)
-            # part is in bytes, but we need to read up to the first null
-            # byte.
-            if sys.version_info >= (2,6):
+            # part is in bytes, but we need to read up to the first null
+            # byte.
+            if sys.version_info >= (2, 6):
                 null_byte = bytes([0])
             else:
                 null_byte = chr(0)
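For reference, the "<iii" format used by extract_wininst_cfg() unpacks three little-endian 32-bit integers (tag, config length, bitmap length). A self-contained round trip with sample values:

    import struct

    # Sample values only; a real bdist_wininst .exe carries these in its footer.
    packed = struct.pack("<iii", 0x1234567A, 64, 0)
    tag, cfglen, bmlen = struct.unpack("<iii", packed)
    assert (tag, cfglen, bmlen) == (0x1234567A, 64, 0)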
@@ -1386,25 +1448,25 @@ def get_exe_prefixes(exe_filename):
         for info in z.infolist():
             name = info.filename
             parts = name.split('/')
-            if len(parts)==3 and parts[2]=='PKG-INFO':
+            if len(parts) == 3 and parts[2] == 'PKG-INFO':
                 if parts[1].endswith('.egg-info'):
-                    prefixes.insert(0,('/'.join(parts[:2]), 'EGG-INFO/'))
+                    prefixes.insert(0, ('/'.join(parts[:2]), 'EGG-INFO/'))
                     break
             if len(parts) != 2 or not name.endswith('.pth'):
                 continue
             if name.endswith('-nspkg.pth'):
                 continue
-            if parts[0].upper() in ('PURELIB','PLATLIB'):
+            if parts[0].upper() in ('PURELIB', 'PLATLIB'):
                 contents = z.read(name)
-                if sys.version_info >= (3,):
+                if PY3:
                     contents = contents.decode()
                 for pth in yield_lines(contents):
-                    pth = pth.strip().replace('\\','/')
+                    pth = pth.strip().replace('\\', '/')
                     if not pth.startswith('import'):
-                        prefixes.append((('%s/%s/' % (parts[0],pth)), ''))
+                        prefixes.append((('%s/%s/' % (parts[0], pth)), ''))
     finally:
         z.close()
-    prefixes = [(x.lower(),y) for x, y in prefixes]
+    prefixes = [(x.lower(), y) for x, y in prefixes]
     prefixes.sort()
     prefixes.reverse()
     return prefixes
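The (prefix, replacement) pairs returned by get_exe_prefixes() are matched longest-first because of the sort()/reverse() above; a toy table with invented entries shows the effect:

    # Invented prefix table in the same shape get_exe_prefixes() returns.
    prefixes = [('purelib/', ''), ('purelib/example.egg-info/', 'EGG-INFO/')]
    prefixes.sort()
    prefixes.reverse()

    src = 'purelib/example.egg-info/pkg-info'
    for old, new in prefixes:
        if src.startswith(old):
            print(new + src[len(old):])  # EGG-INFO/pkg-info
            break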
@@ -1418,6 +1480,7 @@ def parse_requirement_arg(spec):
             "Not a URL, existing file, or requirement spec: %r" % (spec,)
         )
 
+
 class PthDistributions(Environment):
     """A .pth file with Distribution paths in it"""
 
@@ -1437,7 +1500,7 @@ class PthDistributions(Environment):
         saw_import = False
         seen = dict.fromkeys(self.sitedirs)
         if os.path.isfile(self.filename):
-            f = open(self.filename,'rt')
+            f = open(self.filename, 'rt')
             for line in f:
                 if line.startswith('import'):
                     saw_import = True
@@ -1449,17 +1512,17 @@ class PthDistributions(Environment):
                 # skip non-existent paths, in case somebody deleted a package
                 # manually, and duplicate paths as well
                 path = self.paths[-1] = normalize_path(
-                    os.path.join(self.basedir,path)
+                    os.path.join(self.basedir, path)
                 )
                 if not os.path.exists(path) or path in seen:
-                    self.paths.pop()    # skip it
-                    self.dirty = True   # we cleaned up, so we're dirty now :)
+                    self.paths.pop()  # skip it
+                    self.dirty = True  # we cleaned up, so we're dirty now :)
                     continue
                 seen[path] = 1
             f.close()
 
         if self.paths and not saw_import:
-            self.dirty = True   # ensure anything we touch has import wrappers
+            self.dirty = True  # ensure anything we touch has import wrappers
         while self.paths and not self.paths[-1].strip():
             self.paths.pop()
 
@@ -1468,7 +1531,7 @@ class PthDistributions(Environment):
         if not self.dirty:
             return
 
-        data = '\n'.join(map(self.make_relative,self.paths))
+        data = '\n'.join(map(self.make_relative, self.paths))
         if data:
             log.debug("Saving %s", self.filename)
             data = (
@@ -1482,7 +1545,7 @@ class PthDistributions(Environment):
 
             if os.path.islink(self.filename):
                 os.unlink(self.filename)
-            f = open(self.filename,'wt')
+            f = open(self.filename, 'wt')
             f.write(data)
             f.close()
 
@@ -1494,10 +1557,14 @@ class PthDistributions(Environment):
 
     def add(self, dist):
         """Add `dist` to the distribution map"""
-        if (dist.location not in self.paths and (
+        new_path = (
+            dist.location not in self.paths and (
                 dist.location not in self.sitedirs or
-                dist.location == os.getcwd() # account for '.' being in PYTHONPATH
-                )):
+                # account for '.' being in PYTHONPATH
+                dist.location == os.getcwd()
+            )
+        )
+        if new_path:
             self.paths.append(dist.location)
             self.dirty = True
         Environment.add(self, dist)
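PthDistributions manages an easy-install.pth file: a file in a site directory whose plain lines are appended to sys.path (and whose "import" lines are executed) when the directory is processed. A self-contained sketch of that mechanism using site.addsitedir() and a throwaway directory:

    import os
    import site
    import sys
    import tempfile

    # Throwaway site dir containing one .pth entry that points elsewhere.
    d = tempfile.mkdtemp()
    target = os.path.join(d, 'packages')
    os.mkdir(target)
    with open(os.path.join(d, 'demo.pth'), 'w') as f:
        f.write(target + '\n')

    site.addsitedir(d)         # processes demo.pth the way site.py would
    print(target in sys.path)  # True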
@@ -1509,13 +1576,13 @@ class PthDistributions(Environment):
             self.dirty = True
         Environment.remove(self, dist)
 
-    def make_relative(self,path):
+    def make_relative(self, path):
         npath, last = os.path.split(normalize_path(path))
         baselen = len(self.basedir)
         parts = [last]
-        sep = os.altsep=='/' and '/' or os.sep
-        while len(npath)>=baselen:
-            if npath==self.basedir:
+        sep = os.altsep == '/' and '/' or os.sep
+        while len(npath) >= baselen:
+            if npath == self.basedir:
                 parts.append(os.curdir)
                 parts.reverse()
                 return sep.join(parts)
@@ -1524,59 +1591,203 @@ class PthDistributions(Environment):
         else:
             return path
 
-def get_script_header(script_text, executable=sys_executable, wininst=False):
-    """Create a #! line, getting options (if any) from script_text"""
-    from distutils.command.build_scripts import first_line_re
+
+def _first_line_re():
+    """
+    Return a regular expression based on first_line_re suitable for matching
+    strings.
+    """
+    if isinstance(first_line_re.pattern, str):
+        return first_line_re
 
     # first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern.
-    if not isinstance(first_line_re.pattern, str):
-        first_line_re = re.compile(first_line_re.pattern.decode())
+    return re.compile(first_line_re.pattern.decode())
 
-    first = (script_text+'\n').splitlines()[0]
-    match = first_line_re.match(first)
-    options = ''
-    if match:
-        options = match.group(1) or ''
-        if options: options = ' '+options
-    if wininst:
-        executable = "python.exe"
-    else:
-        executable = nt_quote_arg(executable)
-    hdr = "#!%(executable)s%(options)s\n" % locals()
-    if not isascii(hdr):
-        # Non-ascii path to sys.executable, use -x to prevent warnings
-        if options:
-            if options.strip().startswith('-'):
-                options = ' -x'+options.strip()[1:]
-            # else: punt, we can't do it, let the warning happen anyway
-        else:
-            options = ' -x'
-    executable = fix_jython_executable(executable, options)
-    hdr = "#!%(executable)s%(options)s\n" % locals()
-    return hdr
 
 def auto_chmod(func, arg, exc):
-    if func is os.remove and os.name=='nt':
+    if func is os.remove and os.name == 'nt':
         chmod(arg, stat.S_IWRITE)
         return func(arg)
     et, ev, _ = sys.exc_info()
-    reraise(et, (ev[0], ev[1] + (" %s %s" % (func,arg))))
+    reraise(et, (ev[0], ev[1] + (" %s %s" % (func, arg))))
 
-def uncache_zipdir(path):
-    """Ensure that the importer caches dont have stale info for `path`"""
-    from zipimport import _zip_directory_cache as zdc
-    _uncache(path, zdc)
-    _uncache(path, sys.path_importer_cache)
 
-def _uncache(path, cache):
-    if path in cache:
-        del cache[path]
+def update_dist_caches(dist_path, fix_zipimporter_caches):
+    """
+    Fix any globally cached `dist_path` related data
+
+    `dist_path` should be a path of a newly installed egg distribution (zipped
+    or unzipped).
+
+    sys.path_importer_cache contains finder objects that have been cached when
+    importing data from the original distribution. Any such finders need to be
+    cleared since the replacement distribution might be packaged differently,
+    e.g. a zipped egg distribution might get replaced with an unzipped egg
+    folder or vice versa. Having the old finders cached may then cause Python
+    to attempt loading modules from the replacement distribution using an
+    incorrect loader.
+
+    zipimport.zipimporter objects are Python loaders charged with importing
+    data packaged inside zip archives. If stale loaders referencing the
+    original distribution are left behind, they can fail to load modules from
+    the replacement distribution. E.g. if an old zipimport.zipimporter instance
+    is used to load data from a new zipped egg archive, it may cause the
+    operation to attempt to locate the requested data in the wrong location -
+    one indicated by the original distribution's zip archive directory
+    information. Such an operation may then fail outright, e.g. report having
+    read a 'bad local file header', or even worse, it may fail silently &
+    return invalid data.
+
+    zipimport._zip_directory_cache contains cached zip archive directory
+    information for all existing zipimport.zipimporter instances and all such
+    instances connected to the same archive share the same cached directory
+    information.
+
+    If asked, and the underlying Python implementation allows it, we can fix
+    all existing zipimport.zipimporter instances instead of having to track
+    them down and remove them one by one, by updating their shared cached zip
+    archive directory information. This, of course, assumes that the
+    replacement distribution is packaged as a zipped egg.
+
+    If not asked to fix existing zipimport.zipimporter instances, we still do
+    our best to clear any remaining zipimport.zipimporter related cached data
+    that might somehow later get used when attempting to load data from the new
+    distribution and thus cause such load operations to fail. Note that when
+    tracking down such remaining stale data, we cannot catch every conceivable
+    usage from here, and we clear only those that we know of and have found to
+    cause problems if left alive. Any remaining caches should be updated by
+    whoever is in charge of maintaining them, i.e. they should be ready to
+    handle us replacing their zip archives with new distributions at runtime.
+
+    """
+    # There are several other known sources of stale zipimport.zipimporter
+    # instances that we do not clear here, but might if ever given a reason to
+    # do so:
+    # * Global setuptools pkg_resources.working_set (a.k.a. 'master working
+    #   set') may contain distributions which may in turn contain their
+    #   zipimport.zipimporter loaders.
+    # * Several zipimport.zipimporter loaders held by local variables further
+    #   up the function call stack when running the setuptools installation.
+    # * Already loaded modules may have their __loader__ attribute set to the
+    #   exact loader instance used when importing them. Python 3.4 docs state
+    #   that this information is intended mostly for introspection and so is
+    #   not expected to cause us problems.
+    normalized_path = normalize_path(dist_path)
+    _uncache(normalized_path, sys.path_importer_cache)
+    if fix_zipimporter_caches:
+        _replace_zip_directory_cache_data(normalized_path)
     else:
-        path = normalize_path(path)
-        for p in cache:
-            if normalize_path(p)==path:
-                del cache[p]
-                return
+        # Here, even though we do not want to fix existing and now stale
+        # zipimporter cache information, we still want to remove it. Related to
+        # Python's zip archive directory information cache, we clear each of
+        # its stale entries in two phases:
+        #   1. Clear the entry so attempting to access zip archive information
+        #      via any existing stale zipimport.zipimporter instances fails.
+        #   2. Remove the entry from the cache so any newly constructed
+        #      zipimport.zipimporter instances do not end up using old stale
+        #      zip archive directory information.
+        # This whole stale data removal step does not seem strictly necessary,
+        # but has been left in because it was done before we started replacing
+        # the zip archive directory information cache content if possible, and
+        # there are no relevant unit tests that we can depend on to tell us if
+        # this is really needed.
+        _remove_and_clear_zip_directory_cache_data(normalized_path)
+
+
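As background for the cache handling above, a minimal standalone sketch (not part of the diff) of clearing both caches for one archive; egg_path is a hypothetical zipped egg:

    import sys
    import zipimport

    egg_path = '/tmp/example-1.0-py2.7.egg'  # hypothetical path

    # Drop any cached finder so future imports re-probe the path.
    sys.path_importer_cache.pop(egg_path, None)

    # Clear the shared zip directory data; constructing a fresh zipimporter
    # re-reads the archive and repopulates the cache (raises if the file is gone).
    if egg_path in zipimport._zip_directory_cache:
        del zipimport._zip_directory_cache[egg_path]
    zipimport.zipimporter(egg_path)
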
+def _collect_zipimporter_cache_entries(normalized_path, cache):
+    """
+    Return zipimporter cache entry keys related to a given normalized path.
+
+    Alternative path spellings (e.g. those using different character case or
+    those using alternative path separators) related to the same path are
+    included. Any sub-path entries are included as well, i.e. those
+    corresponding to zip archives embedded in other zip archives.
+
+    """
+    result = []
+    prefix_len = len(normalized_path)
+    for p in cache:
+        np = normalize_path(p)
+        if (np.startswith(normalized_path) and
+                np[prefix_len:prefix_len + 1] in (os.sep, '')):
+            result.append(p)
+    return result
+
+
+def _update_zipimporter_cache(normalized_path, cache, updater=None):
+    """
+    Update zipimporter cache data for a given normalized path.
+
+    Any sub-path entries are processed as well, i.e. those corresponding to zip
+    archives embedded in other zip archives.
+
+    The given updater is a callable taking a cache entry key and the original
+    entry (already removed from the cache) and is expected to update the entry,
+    possibly returning a new one to be inserted in its place. Returning None
+    indicates that the entry should not be replaced with a new one. If no
+    updater is given, the cache entries are simply removed without any
+    additional processing, the same as if the updater simply returned None.
+
+    """
+    for p in _collect_zipimporter_cache_entries(normalized_path, cache):
+        # N.B. pypy's custom zipimport._zip_directory_cache implementation does
+        # not support the complete dict interface:
+        # * Does not support item assignment, so on PyPy this function can be
+        #   used only for removing existing cache entries.
+        # * Does not support the dict.pop() method, forcing us to use the
+        #   get/del patterns instead. For more detailed information see the
+        #   following links:
+        #      https://bitbucket.org/pypa/setuptools/issue/202/more-robust-zipimporter-cache-invalidation#comment-10495960
+        #      https://bitbucket.org/pypy/pypy/src/dd07756a34a41f674c0cacfbc8ae1d4cc9ea2ae4/pypy/module/zipimport/interp_zipimport.py#cl-99
+        old_entry = cache[p]
+        del cache[p]
+        new_entry = updater and updater(p, old_entry)
+        if new_entry is not None:
+            cache[p] = new_entry
+
+
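A usage sketch of the updater protocol documented above (the cache values are assumed to behave like dicts, as CPython's _zip_directory_cache entries do; the path is invented):

    import zipimport

    def _clear_in_place(path, old_entry):
        # Wipe the stale directory data but keep the entry under the same key.
        old_entry.clear()
        return old_entry

    def _drop(path, old_entry):
        # Returning None (or passing no updater at all) removes the entry.
        return None

    _update_zipimporter_cache('/tmp/example.egg', zipimport._zip_directory_cache,
                              updater=_clear_in_place)
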
+def _uncache(normalized_path, cache):
+    _update_zipimporter_cache(normalized_path, cache)
+
+
+def _remove_and_clear_zip_directory_cache_data(normalized_path):
+    def clear_and_remove_cached_zip_archive_directory_data(path, old_entry):
+        old_entry.clear()
+
+    _update_zipimporter_cache(
+        normalized_path, zipimport._zip_directory_cache,
+        updater=clear_and_remove_cached_zip_archive_directory_data)
+
+# The PyPy implementation does not allow directly writing to
+# zipimport._zip_directory_cache and so prevents us from attempting to correct
+# its content. The best we can do there is clear the problematic cache content
+# and have PyPy repopulate it as needed. The downside is that if there are any
+# stale zipimport.zipimporter instances lying around, attempting to use them
+# will fail because their zip archive directory information is no longer
+# available, instead of being automatically corrected to use the new correct
+# zip archive directory information.
+if '__pypy__' in sys.builtin_module_names:
+    _replace_zip_directory_cache_data = \
+        _remove_and_clear_zip_directory_cache_data
+else:
+    def _replace_zip_directory_cache_data(normalized_path):
+        def replace_cached_zip_archive_directory_data(path, old_entry):
+            # N.B. In theory, we could load the zip directory information just
+            # once for all updated path spellings, and then copy it locally and
+            # update its contained path strings to contain the correct
+            # spelling, but that seems like a way too invasive move (this cache
+            # structure is not officially documented anywhere and could in
+            # theory change with new Python releases) for no significant
+            # benefit.
+            old_entry.clear()
+            zipimport.zipimporter(path)
+            old_entry.update(zipimport._zip_directory_cache[path])
+            return old_entry
+
+        _update_zipimporter_cache(
+            normalized_path, zipimport._zip_directory_cache,
+            updater=replace_cached_zip_archive_directory_data)
+
 
 def is_python(text, filename=''):
     "Is this string a valid Python script?"
@@ -1587,94 +1798,181 @@ def is_python(text, filename=''):
     else:
         return True
 
+
 def is_sh(executable):
     """Determine if the specified executable is a .sh (contains a #! line)"""
     try:
-        fp = open(executable)
-        magic = fp.read(2)
-        fp.close()
-    except (OSError,IOError): return executable
+        with io.open(executable, encoding='latin-1') as fp:
+            magic = fp.read(2)
+    except (OSError, IOError):
+        return executable
     return magic == '#!'
 
+
 def nt_quote_arg(arg):
     """Quote a command line argument according to Windows parsing rules"""
+    return subprocess.list2cmdline([arg])
 
-    result = []
-    needquote = False
-    nb = 0
-
-    needquote = (" " in arg) or ("\t" in arg)
-    if needquote:
-        result.append('"')
-
-    for c in arg:
-        if c == '\\':
-            nb += 1
-        elif c == '"':
-            # double preceding backslashes, then add a \"
-            result.append('\\' * (nb*2) + '\\"')
-            nb = 0
-        else:
-            if nb:
-                result.append('\\' * nb)
-                nb = 0
-            result.append(c)
-
-    if nb:
-        result.append('\\' * nb)
-
-    if needquote:
-        result.append('\\' * nb)    # double the trailing backslashes
-        result.append('"')
-
-    return ''.join(result)
 
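The new nt_quote_arg simply defers to subprocess.list2cmdline; for example:

    >>> nt_quote_arg('C:\\Program Files\\Python\\python.exe')
    '"C:\\Program Files\\Python\\python.exe"'
    >>> nt_quote_arg('python')
    'python'
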
 def is_python_script(script_text, filename):
     """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc.
     """
     if filename.endswith('.py') or filename.endswith('.pyw'):
-        return True     # extension says it's Python
+        return True  # extension says it's Python
     if is_python(script_text, filename):
-        return True     # it's syntactically valid Python
+        return True  # it's syntactically valid Python
     if script_text.startswith('#!'):
         # It begins with a '#!' line, so check if 'python' is in it somewhere
         return 'python' in script_text.splitlines()[0].lower()
 
-    return False    # Not any Python I can recognize
+    return False  # Not any Python I can recognize
+
 
 try:
     from os import chmod as _chmod
 except ImportError:
     # Jython compatibility
-    def _chmod(*args): pass
+    def _chmod(*args):
+        pass
+
 
 def chmod(path, mode):
     log.debug("changing mode of %s to %o", path, mode)
     try:
         _chmod(path, mode)
-    except os.error:
-        e = sys.exc_info()[1]
+    except os.error as e:
         log.debug("chmod failed: %s", e)
 
-def fix_jython_executable(executable, options):
-    if sys.platform.startswith('java') and is_sh(executable):
-        # Workaround for Jython is not needed on Linux systems.
-        import java
-        if java.lang.System.getProperty("os.name") == "Linux":
-            return executable
 
-        # Workaround Jython's sys.executable being a .sh (an invalid
-        # shebang line interpreter)
-        if options:
+def fix_jython_executable(executable, options):
+    warnings.warn("Use JythonCommandSpec", DeprecationWarning, stacklevel=2)
+
+    if not JythonCommandSpec.relevant():
+        return executable
+
+    cmd = CommandSpec.best().from_param(executable)
+    cmd.install_options(options)
+    return cmd.as_header().lstrip('#!').rstrip('\n')
+
+
+class CommandSpec(list):
+    """
+    A command spec for a #! header, specified as a list of arguments akin to
+    those passed to Popen.
+    """
+
+    options = []
+    split_args = dict()
+
+    @classmethod
+    def best(cls):
+        """
+        Choose the best CommandSpec class based on environmental conditions.
+        """
+        return cls if not JythonCommandSpec.relevant() else JythonCommandSpec
+
+    @classmethod
+    def _sys_executable(cls):
+        _default = os.path.normpath(sys.executable)
+        return os.environ.get('__PYVENV_LAUNCHER__', _default)
+
+    @classmethod
+    def from_param(cls, param):
+        """
+        Construct a CommandSpec from a parameter to build_scripts, which may
+        be None.
+        """
+        if isinstance(param, cls):
+            return param
+        if isinstance(param, list):
+            return cls(param)
+        if param is None:
+            return cls.from_environment()
+        # otherwise, assume it's a string.
+        return cls.from_string(param)
+
+    @classmethod
+    def from_environment(cls):
+        return cls([cls._sys_executable()])
+
+    @classmethod
+    def from_string(cls, string):
+        """
+        Construct a command spec from a simple string representing a command
+        line parseable by shlex.split.
+        """
+        items = shlex.split(string, **cls.split_args)
+        return cls(items)
+
+    def install_options(self, script_text):
+        self.options = shlex.split(self._extract_options(script_text))
+        cmdline = subprocess.list2cmdline(self)
+        if not isascii(cmdline):
+            self.options[:0] = ['-x']
+
+    @staticmethod
+    def _extract_options(orig_script):
+        """
+        Extract any options from the first line of the script.
+        """
+        first = (orig_script + '\n').splitlines()[0]
+        match = _first_line_re().match(first)
+        options = match.group(1) or '' if match else ''
+        return options.strip()
+
+    def as_header(self):
+        return self._render(self + list(self.options))
+
+    @staticmethod
+    def _render(items):
+        cmdline = subprocess.list2cmdline(items)
+        return '#!' + cmdline + '\n'
+
+# For pbr compat; will be removed in a future version.
+sys_executable = CommandSpec._sys_executable()
+
+
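A small usage sketch of the CommandSpec API above (interpreter path and script text are illustrative; on CPython, best() returns CommandSpec itself and from_param() with a string delegates to from_string()):

    cmd = CommandSpec.best().from_param('/usr/bin/python')
    cmd.install_options('#!python -S\nprint("hello")\n')  # picks '-S' out of the #! line
    cmd.as_header()
    # -> '#!/usr/bin/python -S\n'
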
+class WindowsCommandSpec(CommandSpec):
+    split_args = dict(posix=False)
+
+
+class JythonCommandSpec(CommandSpec):
+    @classmethod
+    def relevant(cls):
+        return (
+            sys.platform.startswith('java')
+            and
+            __import__('java').lang.System.getProperty('os.name') != 'Linux'
+        )
+
+    @classmethod
+    def from_environment(cls):
+        string = '"' + cls._sys_executable() + '"'
+        return cls.from_string(string)
+
+    @classmethod
+    def from_string(cls, string):
+        return cls([string])
+
+    def as_header(self):
+        """
+        Workaround Jython's sys.executable being a .sh (an invalid
+        shebang line interpreter)
+        """
+        if not is_sh(self[0]):
+            return super(JythonCommandSpec, self).as_header()
+
+        if self.options:
             # Can't apply the workaround, leave it broken
             log.warn(
                 "WARNING: Unable to adapt shebang line for Jython,"
                 " the following script is NOT executable\n"
                 "         see http://bugs.jython.org/issue1112 for"
                 " more information.")
-        else:
-            return '/usr/bin/env %s' % executable
-    return executable
+            return super(JythonCommandSpec, self).as_header()
+
+        items = ['/usr/bin/env'] + self + list(self.options)
+        return self._render(items)
 
 
 class ScriptWriter(object):
@@ -1695,39 +1993,81 @@ class ScriptWriter(object):
             )
     """).lstrip()
 
+    command_spec_class = CommandSpec
+
     @classmethod
-    def get_script_args(cls, dist, executable=sys_executable, wininst=False):
+    def get_script_args(cls, dist, executable=None, wininst=False):
+        # for backward compatibility
+        warnings.warn("Use get_args", DeprecationWarning)
+        writer = (WindowsScriptWriter if wininst else ScriptWriter).best()
+        header = cls.get_script_header("", executable, wininst)
+        return writer.get_args(dist, header)
+
+    @classmethod
+    def get_script_header(cls, script_text, executable=None, wininst=False):
+        # for backward compatibility
+        warnings.warn("Use get_header", DeprecationWarning)
+        if wininst:
+            executable = "python.exe"
+        cmd = cls.command_spec_class.best().from_param(executable)
+        cmd.install_options(script_text)
+        return cmd.as_header()
+
+    @classmethod
+    def get_args(cls, dist, header=None):
         """
         Yield write_script() argument tuples for a distribution's entrypoints
         """
-        gen_class = cls.get_writer(wininst)
+        if header is None:
+            header = cls.get_header()
         spec = str(dist.as_requirement())
-        header = get_script_header("", executable, wininst)
         for type_ in 'console', 'gui':
             group = type_ + '_scripts'
             for name, ep in dist.get_entry_map(group).items():
-                script_text = gen_class.template % locals()
-                for res in gen_class._get_script_args(type_, name, header,
+                script_text = cls.template % locals()
+                for res in cls._get_script_args(type_, name, header,
                         script_text):
                     yield res
 
     @classmethod
     def get_writer(cls, force_windows):
-        if force_windows or sys.platform=='win32':
-            return WindowsScriptWriter.get_writer()
-        return cls
+        # for backward compatibility
+        warnings.warn("Use best", DeprecationWarning)
+        return WindowsScriptWriter.best() if force_windows else cls.best()
+
+    @classmethod
+    def best(cls):
+        """
+        Select the best ScriptWriter for this environment.
+        """
+        return WindowsScriptWriter.best() if sys.platform == 'win32' else cls
 
     @classmethod
     def _get_script_args(cls, type_, name, header, script_text):
         # Simply write the stub with no extension.
-        yield (name, header+script_text)
+        yield (name, header + script_text)
+
+    @classmethod
+    def get_header(cls, script_text="", executable=None):
+        """Create a #! line, getting options (if any) from script_text"""
+        cmd = cls.command_spec_class.best().from_param(executable)
+        cmd.install_options(script_text)
+        return cmd.as_header()
 
 
 class WindowsScriptWriter(ScriptWriter):
+    command_spec_class = WindowsCommandSpec
+
     @classmethod
     def get_writer(cls):
+        # for backward compatibility
+        warnings.warn("Use best", DeprecationWarning)
+        return cls.best()
+
+    @classmethod
+    def best(cls):
         """
-        Get a script writer suitable for Windows
+        Select the best ScriptWriter suitable for Windows
         """
         writer_lookup = dict(
             executable=WindowsExecutableLauncherWriter,
@@ -1743,12 +2083,12 @@ class WindowsScriptWriter(ScriptWriter):
         ext = dict(console='.pya', gui='.pyw')[type_]
         if ext not in os.environ['PATHEXT'].lower().split(';'):
             warnings.warn("%s not listed in PATHEXT; scripts will not be "
-                "recognized as executables." % ext, UserWarning)
+                          "recognized as executables." % ext, UserWarning)
         old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe']
         old.remove(ext)
         header = cls._adjust_header(type_, header)
-        blockers = [name+x for x in old]
-        yield name+ext, header+script_text, 't', blockers
+        blockers = [name + x for x in old]
+        yield name + ext, header + script_text, 't', blockers
 
     @staticmethod
     def _adjust_header(type_, orig_header):
@@ -1775,32 +2115,35 @@ class WindowsExecutableLauncherWriter(WindowsScriptWriter):
         """
         For Windows, add a .py extension and an .exe launcher
         """
-        if type_=='gui':
+        if type_ == 'gui':
             launcher_type = 'gui'
             ext = '-script.pyw'
             old = ['.pyw']
         else:
             launcher_type = 'cli'
             ext = '-script.py'
-            old = ['.py','.pyc','.pyo']
+            old = ['.py', '.pyc', '.pyo']
         hdr = cls._adjust_header(type_, header)
-        blockers = [name+x for x in old]
-        yield (name+ext, hdr+script_text, 't', blockers)
+        blockers = [name + x for x in old]
+        yield (name + ext, hdr + script_text, 't', blockers)
         yield (
-            name+'.exe', get_win_launcher(launcher_type),
-            'b' # write in binary mode
+            name + '.exe', get_win_launcher(launcher_type),
+            'b'  # write in binary mode
         )
         if not is_64bit():
             # install a manifest for the launcher to prevent Windows
-            #  from detecting it as an installer (which it will for
+            # from detecting it as an installer (which it will for
             #  launchers like easy_install.exe). Consider only
             #  adding a manifest for launchers detected as installers.
             #  See Distribute #143 for details.
             m_name = name + '.exe.manifest'
             yield (m_name, load_launcher_manifest(name), 't')
 
+
 # for backward-compatibility
 get_script_args = ScriptWriter.get_script_args
+get_script_header = ScriptWriter.get_script_header
+
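For illustration, a sketch of how a caller such as easy_install might drive the new writer API ('setuptools' stands in for any installed project that declares console_scripts entry points):

    from pkg_resources import get_distribution

    dist = get_distribution('setuptools')      # example distribution with entry points
    writer = ScriptWriter.best()
    header = writer.get_header()               # e.g. '#!/usr/bin/python\n'
    for args in writer.get_args(dist, header):
        name, text = args[0], args[1]          # extra members (mode, blockers) appear on Windows
        print(name)
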
 
 def get_win_launcher(type):
     """
@@ -1811,7 +2154,7 @@ def get_win_launcher(type):
     Returns the executable as a byte string.
     """
     launcher_fn = '%s.exe' % type
-    if platform.machine().lower()=='arm':
+    if platform.machine().lower() == 'arm':
         launcher_fn = launcher_fn.replace(".", "-arm.")
     if is_64bit():
         launcher_fn = launcher_fn.replace(".", "-64.")
@@ -1819,13 +2162,15 @@ def get_win_launcher(type):
         launcher_fn = launcher_fn.replace(".", "-32.")
     return resource_string('setuptools', launcher_fn)
 
+
 def load_launcher_manifest(name):
     manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml')
-    if sys.version_info[0] < 3:
+    if PY2:
         return manifest % vars()
     else:
         return manifest.decode('utf-8') % vars()
 
+
 def rmtree(path, ignore_errors=False, onerror=auto_chmod):
     """Recursively delete a directory tree.
 
@@ -1861,55 +2206,61 @@ def rmtree(path, ignore_errors=False, onerror=auto_chmod):
     except os.error:
         onerror(os.rmdir, path, sys.exc_info())
 
+
 def current_umask():
-    tmp = os.umask(0x12)    # 022
+    tmp = os.umask(0o022)
     os.umask(tmp)
     return tmp
 
+
 def bootstrap():
     # This function is called when setuptools*.egg is run using /bin/sh
     import setuptools
+
     argv0 = os.path.dirname(setuptools.__path__[0])
     sys.argv[0] = argv0
     sys.argv.append(argv0)
     main()
 
+
 def main(argv=None, **kw):
     from setuptools import setup
     from setuptools.dist import Distribution
-    import distutils.core
 
-    USAGE = """\
-usage: %(script)s [options] requirement_or_url ...
-   or: %(script)s --help
-"""
+    class DistributionWithoutHelpCommands(Distribution):
+        common_usage = ""
+
+        def _show_help(self, *args, **kw):
+            with _patch_usage():
+                Distribution._show_help(self, *args, **kw)
+
+    if argv is None:
+        argv = sys.argv[1:]
+
+    with _patch_usage():
+        setup(
+            script_args=['-q', 'easy_install', '-v'] + argv,
+            script_name=sys.argv[0] or 'easy_install',
+            distclass=DistributionWithoutHelpCommands, **kw
+        )
+
+
+@contextlib.contextmanager
+def _patch_usage():
+    import distutils.core
+    USAGE = textwrap.dedent("""
+        usage: %(script)s [options] requirement_or_url ...
+           or: %(script)s --help
+        """).lstrip()
 
     def gen_usage(script_name):
         return USAGE % dict(
             script=os.path.basename(script_name),
         )
 
-    def with_ei_usage(f):
-        old_gen_usage = distutils.core.gen_usage
-        try:
-            distutils.core.gen_usage = gen_usage
-            return f()
-        finally:
-            distutils.core.gen_usage = old_gen_usage
-
-    class DistributionWithoutHelpCommands(Distribution):
-        common_usage = ""
-
-        def _show_help(self,*args,**kw):
-            with_ei_usage(lambda: Distribution._show_help(self,*args,**kw))
-
-    if argv is None:
-        argv = sys.argv[1:]
-
-    with_ei_usage(lambda:
-        setup(
-            script_args = ['-q','easy_install', '-v']+argv,
-            script_name = sys.argv[0] or 'easy_install',
-            distclass=DistributionWithoutHelpCommands, **kw
-        )
-    )
+    saved = distutils.core.gen_usage
+    distutils.core.gen_usage = gen_usage
+    try:
+        yield
+    finally:
+        distutils.core.gen_usage = saved
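
_patch_usage is an instance of a small, reusable monkeypatch-as-context-manager pattern; a generic sketch of that pattern (names are illustrative, not part of the diff):

    import contextlib

    @contextlib.contextmanager
    def patched_attr(obj, name, replacement):
        saved = getattr(obj, name)
        setattr(obj, name, replacement)
        try:
            yield
        finally:
            setattr(obj, name, saved)

    # e.g. with patched_attr(distutils.core, 'gen_usage', gen_usage): ...
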
diff --git a/awx/lib/site-packages/setuptools/command/egg_info.py b/awx/lib/site-packages/setuptools/command/egg_info.py
index 5953aad4f6..a9940677ea 100644
--- a/awx/lib/site-packages/setuptools/command/egg_info.py
+++ b/awx/lib/site-packages/setuptools/command/egg_info.py
@@ -2,22 +2,30 @@
 
 Create a distribution's .egg-info directory and contents"""
 
+from distutils.filelist import FileList as _FileList
+from distutils.util import convert_path
+from distutils import log
+import distutils.errors
+import distutils.filelist
 import os
 import re
 import sys
 
-from setuptools import Command
-import distutils.errors
-from distutils import log
-from setuptools.command.sdist import sdist
-from setuptools.compat import basestring
-from setuptools import svn_utils
-from distutils.util import convert_path
-from distutils.filelist import FileList as _FileList
-from pkg_resources import (parse_requirements, safe_name, parse_version,
-    safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename)
-from setuptools.command.sdist import walk_revctrl
+try:
+    from setuptools_svn import svn_utils
+except ImportError:
+    pass
 
+from setuptools import Command
+from setuptools.command.sdist import sdist
+from setuptools.compat import basestring, PY3, StringIO
+from setuptools.command.sdist import walk_revctrl
+from pkg_resources import (
+    parse_requirements, safe_name, parse_version,
+    safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename)
+import setuptools.unicode_utils as unicode_utils
+
+from pkg_resources import packaging
 
 class egg_info(Command):
     description = "create a distribution's .egg-info directory"
@@ -26,11 +34,11 @@ class egg_info(Command):
         ('egg-base=', 'e', "directory containing .egg-info directories"
                            " (default: top of the source tree)"),
         ('tag-svn-revision', 'r',
-            "Add subversion revision ID to version number"),
+         "Add subversion revision ID to version number"),
         ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
         ('tag-build=', 'b', "Specify explicit tag to add to version number"),
         ('no-svn-revision', 'R',
-            "Don't add subversion revision ID [default]"),
+         "Don't add subversion revision ID [default]"),
         ('no-date', 'D', "Don't include date stamp [default]"),
     ]
 
@@ -51,6 +59,7 @@ class egg_info(Command):
 
     def save_version_info(self, filename):
         from setuptools.command.setopt import edit_config
+
         values = dict(
             egg_info=dict(
                 tag_svn_revision=0,
@@ -65,25 +74,32 @@ class egg_info(Command):
         self.vtags = self.tags()
         self.egg_version = self.tagged_version()
 
+        parsed_version = parse_version(self.egg_version)
+
         try:
+            is_version = isinstance(parsed_version, packaging.version.Version)
+            spec = (
+                "%s==%s" if is_version else "%s===%s"
+            )
             list(
-                parse_requirements('%s==%s' % (self.egg_name,self.egg_version))
+                parse_requirements(spec % (self.egg_name, self.egg_version))
             )
         except ValueError:
             raise distutils.errors.DistutilsOptionError(
                 "Invalid distribution name or version syntax: %s-%s" %
-                (self.egg_name,self.egg_version)
+                (self.egg_name, self.egg_version)
             )
 
         if self.egg_base is None:
             dirs = self.distribution.package_dir
-            self.egg_base = (dirs or {}).get('',os.curdir)
+            self.egg_base = (dirs or {}).get('', os.curdir)
 
         self.ensure_dirname('egg_base')
-        self.egg_info = to_filename(self.egg_name)+'.egg-info'
+        self.egg_info = to_filename(self.egg_name) + '.egg-info'
         if self.egg_base != os.curdir:
             self.egg_info = os.path.join(self.egg_base, self.egg_info)
-        if '-' in self.egg_name: self.check_broken_egg_info()
+        if '-' in self.egg_name:
+            self.check_broken_egg_info()
 
         # Set package version for the benefit of dumber commands
         # (e.g. sdist, bdist_wininst, etc.)
@@ -95,7 +111,7 @@ class egg_info(Command):
         # to the version info
         #
         pd = self.distribution._patched_dist
-        if pd is not None and pd.key==self.egg_name.lower():
+        if pd is not None and pd.key == self.egg_name.lower():
             pd._version = self.egg_version
             pd._parsed_version = parse_version(self.egg_version)
             self.distribution._patched_dist = None
@@ -127,7 +143,7 @@ class egg_info(Command):
         to the file.
         """
         log.info("writing %s to %s", what, filename)
-        if sys.version_info >= (3,):
+        if PY3:
             data = data.encode("utf-8")
         if not self.dry_run:
             f = open(filename, 'wb')
@@ -153,7 +169,7 @@ class egg_info(Command):
         installer = self.distribution.fetch_build_egg
         for ep in iter_entry_points('egg_info.writers'):
             writer = ep.load(installer=installer)
-            writer(self, ep.name, os.path.join(self.egg_info,ep.name))
+            writer(self, ep.name, os.path.join(self.egg_info, ep.name))
 
         # Get rid of native_libs.txt if it was put there by older bdist_egg
         nl = os.path.join(self.egg_info, "native_libs.txt")
@@ -165,68 +181,96 @@ class egg_info(Command):
     def tags(self):
         version = ''
         if self.tag_build:
-            version+=self.tag_build
-        if self.tag_svn_revision and (
-            os.path.exists('.svn') or os.path.exists('PKG-INFO')
-        ):  version += '-r%s' % self.get_svn_revision()
+            version += self.tag_build
+        if self.tag_svn_revision:
+            rev = self.get_svn_revision()
+            if rev:     # is 0 if it's not an svn working copy
+                version += '-r%s' % rev
         if self.tag_date:
             import time
+
             version += time.strftime("-%Y%m%d")
         return version
 
     @staticmethod
     def get_svn_revision():
+        if 'svn_utils' not in globals():
+            return "0"
         return str(svn_utils.SvnInfo.load(os.curdir).get_revision())
 
     def find_sources(self):
         """Generate SOURCES.txt manifest file"""
-        manifest_filename = os.path.join(self.egg_info,"SOURCES.txt")
+        manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
         mm = manifest_maker(self.distribution)
         mm.manifest = manifest_filename
         mm.run()
         self.filelist = mm.filelist
 
     def check_broken_egg_info(self):
-        bei = self.egg_name+'.egg-info'
+        bei = self.egg_name + '.egg-info'
         if self.egg_base != os.curdir:
             bei = os.path.join(self.egg_base, bei)
         if os.path.exists(bei):
             log.warn(
-                "-"*78+'\n'
+                "-" * 78 + '\n'
                 "Note: Your current .egg-info directory has a '-' in its name;"
                 '\nthis will not work correctly with "setup.py develop".\n\n'
-                'Please rename %s to %s to correct this problem.\n'+'-'*78,
+                'Please rename %s to %s to correct this problem.\n' + '-' * 78,
                 bei, self.egg_info
             )
             self.broken_egg_info = self.egg_info
-            self.egg_info = bei     # make it work for now
+            self.egg_info = bei  # make it work for now
+
 
 class FileList(_FileList):
     """File list that accepts only existing, platform-independent paths"""
 
     def append(self, item):
-        if item.endswith('\r'):     # Fix older sdists built on Windows
+        if item.endswith('\r'):  # Fix older sdists built on Windows
             item = item[:-1]
         path = convert_path(item)
 
-        if sys.version_info >= (3,):
-            try:
-                if os.path.exists(path) or os.path.exists(path.encode('utf-8')):
-                    self.files.append(path)
-            except UnicodeEncodeError:
-                # Accept UTF-8 filenames even if LANG=C
-                if os.path.exists(path.encode('utf-8')):
-                    self.files.append(path)
-                else:
-                    log.warn("'%s' not %s encodable -- skipping", path,
-                        sys.getfilesystemencoding())
-        else:
-            if os.path.exists(path):
-                self.files.append(path)
+        if self._safe_path(path):
+            self.files.append(path)
+
+    def extend(self, paths):
+        self.files.extend(filter(self._safe_path, paths))
+
+    def _repair(self):
+        """
+        Replace self.files with only safe paths
+
+        Because some owners of FileList manipulate the underlying
+        ``files`` attribute directly, this method must be called to
+        repair those paths.
+        """
+        self.files = list(filter(self._safe_path, self.files))
+
+    def _safe_path(self, path):
+        enc_warn = "'%s' not %s encodable -- skipping"
+
+        # To avoid accidental transcoding errors, decode to unicode first
+        u_path = unicode_utils.filesys_decode(path)
+        if u_path is None:
+            log.warn("'%s' in unexpected encoding -- skipping" % path)
+            return False
+
+        # Must ensure utf-8 encodability
+        utf8_path = unicode_utils.try_encode(u_path, "utf-8")
+        if utf8_path is None:
+            log.warn(enc_warn, path, 'utf-8')
+            return False
+
+        try:
+            # accept if either way checks out
+            if os.path.exists(u_path) or os.path.exists(utf8_path):
+                return True
+        # catches encoding errors raised when os.path.exists encodes u_path
+        except UnicodeEncodeError:
+            log.warn(enc_warn, path, sys.getfilesystemencoding())
 
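As a quick behavioral sketch of the reworked FileList (run from a source tree; filenames are illustrative):

    fl = FileList()
    fl.append('setup.py')                     # kept only if it exists and is utf-8 encodable
    fl.extend(['README', 'does-not-exist'])   # extend() filters through the same _safe_path check
    print(fl.files)
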
 
 class manifest_maker(sdist):
-
     template = "MANIFEST.in"
 
     def initialize_options(self):
@@ -241,7 +285,7 @@ class manifest_maker(sdist):
     def run(self):
         self.filelist = FileList()
         if not os.path.exists(self.manifest):
-            self.write_manifest()   # it must exist so it'll get in the list
+            self.write_manifest()  # it must exist so it'll get in the list
         self.filelist.findall()
         self.add_defaults()
         if os.path.exists(self.template):
@@ -251,30 +295,23 @@ class manifest_maker(sdist):
         self.filelist.remove_duplicates()
         self.write_manifest()
 
+    def _manifest_normalize(self, path):
+        path = unicode_utils.filesys_decode(path)
+        return path.replace(os.sep, '/')
+
     def write_manifest(self):
-        """Write the file list in 'self.filelist' (presumably as filled in
-        by 'add_defaults()' and 'read_template()') to the manifest file
+        """
+        Write the file list in 'self.filelist' to the manifest file
         named by 'self.manifest'.
         """
-        # The manifest must be UTF-8 encodable. See #303.
-        if sys.version_info >= (3,):
-            files = []
-            for file in self.filelist.files:
-                try:
-                    file.encode("utf-8")
-                except UnicodeEncodeError:
-                    log.warn("'%s' not UTF-8 encodable -- skipping" % file)
-                else:
-                    files.append(file)
-            self.filelist.files = files
+        self.filelist._repair()
 
-        files = self.filelist.files
-        if os.sep!='/':
-            files = [f.replace(os.sep,'/') for f in files]
-        self.execute(write_file, (self.manifest, files),
-                     "writing manifest file '%s'" % self.manifest)
+        # _repair() ensured encodability, but the paths may not yet be unicode
+        files = [self._manifest_normalize(f) for f in self.filelist.files]
+        msg = "writing manifest file '%s'" % self.manifest
+        self.execute(write_file, (self.manifest, files), msg)
 
-    def warn(self, msg):    # suppress missing-file warnings from sdist
+    def warn(self, msg):  # suppress missing-file warnings from sdist
         if not msg.startswith("standard file not found:"):
             sdist.warn(self, msg)
 
@@ -288,15 +325,41 @@ class manifest_maker(sdist):
         elif os.path.exists(self.manifest):
             self.read_manifest()
         ei_cmd = self.get_finalized_command('egg_info')
+        self._add_egg_info(cmd=ei_cmd)
         self.filelist.include_pattern("*", prefix=ei_cmd.egg_info)
 
+    def _add_egg_info(self, cmd):
+        """
+        Add paths for egg-info files for an external egg-base.
+
+        The egg-info files are written to egg-base. If egg-base is
+        outside the current working directory, this method
+        searchs the egg-base directory for files to include
+        in the manifest. Uses distutils.filelist.findall (which is
+        really the version monkeypatched in by setuptools/__init__.py)
+        to perform the search.
+
+        Since findall records relative paths, prefix the returned
+        paths with cmd.egg_base, so add_default's include_pattern call
+        (which is looking for the absolute cmd.egg_info) will match
+        them.
+        """
+        if cmd.egg_base == os.curdir:
+            # egg-info files were already added by something else
+            return
+
+        discovered = distutils.filelist.findall(cmd.egg_base)
+        resolved = (os.path.join(cmd.egg_base, path) for path in discovered)
+        self.filelist.allfiles.extend(resolved)
+
     def prune_file_list(self):
         build = self.get_finalized_command('build')
         base_dir = self.distribution.get_fullname()
         self.filelist.exclude_pattern(None, prefix=build.build_base)
         self.filelist.exclude_pattern(None, prefix=base_dir)
         sep = re.escape(os.sep)
-        self.filelist.exclude_pattern(sep+r'(RCS|CVS|\.svn)'+sep, is_regex=1)
+        self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep,
+                                      is_regex=1)
 
 
 def write_file(filename, contents):
@@ -304,11 +367,13 @@ def write_file(filename, contents):
     sequence of strings without line terminators) to it.
     """
     contents = "\n".join(contents)
-    if sys.version_info >= (3,):
-        contents = contents.encode("utf-8")
-    f = open(filename, "wb")        # always write POSIX-style manifest
-    f.write(contents)
-    f.close()
+
+    # assuming the contents have been vetted for utf-8 encoding
+    contents = contents.encode("utf-8")
+
+    with open(filename, "wb") as f:  # always write POSIX-style manifest
+        f.write(contents)
+
 
 def write_pkg_info(cmd, basename, filename):
     log.info("writing %s", filename)
@@ -323,10 +388,12 @@ def write_pkg_info(cmd, basename, filename):
         finally:
             metadata.name, metadata.version = oldname, oldver
 
-        safe = getattr(cmd.distribution,'zip_safe',None)
+        safe = getattr(cmd.distribution, 'zip_safe', None)
         from setuptools.command import bdist_egg
+
         bdist_egg.write_safety_flag(cmd.egg_info, safe)
 
+
 def warn_depends_obsolete(cmd, basename, filename):
     if os.path.exists(filename):
         log.warn(
@@ -335,55 +402,75 @@ def warn_depends_obsolete(cmd, basename, filename):
         )
 
 
+def _write_requirements(stream, reqs):
+    lines = yield_lines(reqs or ())
+    append_cr = lambda line: line + '\n'
+    lines = map(append_cr, lines)
+    stream.writelines(lines)
+
+
 def write_requirements(cmd, basename, filename):
     dist = cmd.distribution
-    data = ['\n'.join(yield_lines(dist.install_requires or ()))]
-    for extra,reqs in (dist.extras_require or {}).items():
-        data.append('\n\n[%s]\n%s' % (extra, '\n'.join(yield_lines(reqs))))
-    cmd.write_or_delete_file("requirements", filename, ''.join(data))
+    data = StringIO()
+    _write_requirements(data, dist.install_requires)
+    extras_require = dist.extras_require or {}
+    for extra in sorted(extras_require):
+        data.write('\n[{extra}]\n'.format(**vars()))
+        _write_requirements(data, extras_require[extra])
+    cmd.write_or_delete_file("requirements", filename, data.getvalue())
+
+
+def write_setup_requirements(cmd, basename, filename):
+    data = StringIO()
+    _write_requirements(data, cmd.distribution.setup_requires)
+    cmd.write_or_delete_file("setup-requirements", filename, data.getvalue())
+
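To make the requires.txt layout concrete, a sketch using the helper above (requirement strings are invented; StringIO is the setuptools.compat import used at the top of this module):

    data = StringIO()
    _write_requirements(data, ['requests>=2.0', 'six'])
    data.write('\n[ssl]\n')                  # an extras section, as write_requirements emits
    _write_requirements(data, ['pyOpenSSL'])
    print(data.getvalue())
    # requests>=2.0
    # six
    #
    # [ssl]
    # pyOpenSSL
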
 
 def write_toplevel_names(cmd, basename, filename):
     pkgs = dict.fromkeys(
         [
-            k.split('.',1)[0]
+            k.split('.', 1)[0]
             for k in cmd.distribution.iter_distribution_names()
         ]
     )
-    cmd.write_file("top-level names", filename, '\n'.join(pkgs)+'\n')
+    cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n')
 
 
 def overwrite_arg(cmd, basename, filename):
     write_arg(cmd, basename, filename, True)
 
+
 def write_arg(cmd, basename, filename, force=False):
     argname = os.path.splitext(basename)[0]
     value = getattr(cmd.distribution, argname, None)
     if value is not None:
-        value = '\n'.join(value)+'\n'
+        value = '\n'.join(value) + '\n'
     cmd.write_or_delete_file(argname, filename, value, force)
 
+
 def write_entries(cmd, basename, filename):
     ep = cmd.distribution.entry_points
 
-    if isinstance(ep,basestring) or ep is None:
+    if isinstance(ep, basestring) or ep is None:
         data = ep
     elif ep is not None:
         data = []
-        for section, contents in ep.items():
-            if not isinstance(contents,basestring):
+        for section, contents in sorted(ep.items()):
+            if not isinstance(contents, basestring):
                 contents = EntryPoint.parse_group(section, contents)
-                contents = '\n'.join(map(str,contents.values()))
-            data.append('[%s]\n%s\n\n' % (section,contents))
+                contents = '\n'.join(sorted(map(str, contents.values())))
+            data.append('[%s]\n%s\n\n' % (section, contents))
         data = ''.join(data)
 
     cmd.write_or_delete_file('entry points', filename, data, True)
 
+
 def get_pkg_info_revision():
     # See if we can get a -r### off of PKG-INFO, in case this is an sdist of
     # a subversion revision
     #
     if os.path.exists('PKG-INFO'):
-        f = open('PKG-INFO','rU')
+        f = open('PKG-INFO', 'rU')
         for line in f:
             match = re.match(r"Version:.*-r(\d+)\s*$", line)
             if match:
diff --git a/awx/lib/site-packages/setuptools/command/install.py b/awx/lib/site-packages/setuptools/command/install.py
index 459cd3cd59..d2bca2ec59 100644
--- a/awx/lib/site-packages/setuptools/command/install.py
+++ b/awx/lib/site-packages/setuptools/command/install.py
@@ -1,18 +1,26 @@
-import setuptools
-import sys
-import glob
-from distutils.command.install import install as _install
 from distutils.errors import DistutilsArgError
+import inspect
+import glob
+import warnings
+import platform
+import distutils.command.install as orig
 
-class install(_install):
+import setuptools
+
+# Prior to numpy 1.9, NumPy relied on the '_install' name, so provide it for
+# now. See https://bitbucket.org/pypa/setuptools/issue/199/
+_install = orig.install
+
+
+class install(orig.install):
     """Use easy_install to install the package, w/dependencies"""
 
-    user_options = _install.user_options + [
+    user_options = orig.install.user_options + [
         ('old-and-unmanageable', None, "Try not to use this!"),
         ('single-version-externally-managed', None,
-            "used by system package builders to create 'flat' eggs"),
+         "used by system package builders to create 'flat' eggs"),
     ]
-    boolean_options = _install.boolean_options + [
+    boolean_options = orig.install.boolean_options + [
         'old-and-unmanageable', 'single-version-externally-managed',
     ]
     new_commands = [
@@ -22,13 +30,12 @@ class install(_install):
     _nc = dict(new_commands)
 
     def initialize_options(self):
-        _install.initialize_options(self)
+        orig.install.initialize_options(self)
         self.old_and_unmanageable = None
         self.single_version_externally_managed = None
-        self.no_compile = None  # make DISTUTILS_DEBUG work right!
 
     def finalize_options(self):
-        _install.finalize_options(self)
+        orig.install.finalize_options(self)
         if self.root:
             self.single_version_externally_managed = True
         elif self.single_version_externally_managed:
@@ -41,7 +48,7 @@ class install(_install):
     def handle_extra_path(self):
         if self.root or self.single_version_externally_managed:
             # explicit backward-compatibility mode, allow extra_path to work
-            return _install.handle_extra_path(self)
+            return orig.install.handle_extra_path(self)
 
         # Ignore extra_path when installing an egg (or being run by another
         # command without --root or --single-version-externally-managed
@@ -51,28 +58,41 @@ class install(_install):
     def run(self):
         # Explicit request for old-style install?  Just do it
         if self.old_and_unmanageable or self.single_version_externally_managed:
-            return _install.run(self)
+            return orig.install.run(self)
 
-        # Attempt to detect whether we were called from setup() or by another
-        # command.  If we were called by setup(), our caller will be the
-        # 'run_command' method in 'distutils.dist', and *its* caller will be
-        # the 'run_commands' method.  If we were called any other way, our
-        # immediate caller *might* be 'run_command', but it won't have been
-        # called by 'run_commands'.  This is slightly kludgy, but seems to
-        # work.
-        #
-        caller = sys._getframe(2)
-        caller_module = caller.f_globals.get('__name__','')
-        caller_name = caller.f_code.co_name
-
-        if caller_module != 'distutils.dist' or caller_name!='run_commands':
-            # We weren't called from the command line or setup(), so we
-            # should run in backward-compatibility mode to support bdist_*
-            # commands.
-            _install.run(self)
+        if not self._called_from_setup(inspect.currentframe()):
+            # Run in backward-compatibility mode to support bdist_* commands.
+            orig.install.run(self)
         else:
             self.do_egg_install()
 
+    @staticmethod
+    def _called_from_setup(run_frame):
+        """
+        Attempt to detect whether run() was called from setup() or by another
+        command.  If called by setup(), the parent caller will be the
+        'run_command' method in 'distutils.dist', and *its* caller will be
+        the 'run_commands' method.  If called any other way, the
+        immediate caller *might* be 'run_command', but it won't have been
+        called by 'run_commands'. Return True in that case or if a call stack
+        is unavailable. Return False otherwise.
+        """
+        if run_frame is None:
+            msg = "Call stack not available. bdist_* commands may fail."
+            warnings.warn(msg)
+            if platform.python_implementation() == 'IronPython':
+                msg = "For best results, pass -X:Frames to enable call stack."
+                warnings.warn(msg)
+            return True
+        res = inspect.getouterframes(run_frame)[2]
+        caller, = res[:1]
+        info = inspect.getframeinfo(caller)
+        caller_module = caller.f_globals.get('__name__', '')
+        return (
+            caller_module == 'distutils.dist'
+            and info.function == 'run_commands'
+        )
+
     def do_egg_install(self):
 
         easy_install = self.distribution.get_command_class('easy_install')
@@ -97,7 +117,9 @@ class install(_install):
         cmd.run()
         setuptools.bootstrap_install_from = None
 
+
 # XXX Python 3.1 doesn't see _nc if this is inside the class
-install.sub_commands = [
-        cmd for cmd in _install.sub_commands if cmd[0] not in install._nc
-    ] + install.new_commands
+install.sub_commands = (
+    [cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc] +
+    install.new_commands
+)
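
For reference, a standalone sketch of the frame-inspection idea behind _called_from_setup (independent of setuptools; the depth value is illustrative):

    import inspect

    def caller_identity(depth=2):
        frame = inspect.currentframe()
        if frame is None:                     # e.g. IronPython without -X:Frames
            return None
        outer = inspect.getouterframes(frame)
        if len(outer) <= depth:
            return None
        caller = outer[depth][0]              # the frame object `depth` levels up
        module = caller.f_globals.get('__name__', '')
        function = inspect.getframeinfo(caller).function
        return module, function
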
diff --git a/awx/lib/site-packages/setuptools/command/install_egg_info.py b/awx/lib/site-packages/setuptools/command/install_egg_info.py
index f44b34b555..fd0f118b33 100644
--- a/awx/lib/site-packages/setuptools/command/install_egg_info.py
+++ b/awx/lib/site-packages/setuptools/command/install_egg_info.py
@@ -1,7 +1,10 @@
+from distutils import log, dir_util
+import os
+
 from setuptools import Command
 from setuptools.archive_util import unpack_archive
-from distutils import log, dir_util
-import os, shutil, pkg_resources
+import pkg_resources
+
 
 class install_egg_info(Command):
     """Install an .egg-info directory for the package"""
@@ -16,26 +19,26 @@ class install_egg_info(Command):
         self.install_dir = None
 
     def finalize_options(self):
-        self.set_undefined_options('install_lib',('install_dir','install_dir'))
+        self.set_undefined_options('install_lib',
+                                   ('install_dir', 'install_dir'))
         ei_cmd = self.get_finalized_command("egg_info")
         basename = pkg_resources.Distribution(
             None, None, ei_cmd.egg_name, ei_cmd.egg_version
-        ).egg_name()+'.egg-info'
+        ).egg_name() + '.egg-info'
         self.source = ei_cmd.egg_info
         self.target = os.path.join(self.install_dir, basename)
         self.outputs = [self.target]
 
     def run(self):
         self.run_command('egg_info')
-        target = self.target
         if os.path.isdir(self.target) and not os.path.islink(self.target):
             dir_util.remove_tree(self.target, dry_run=self.dry_run)
         elif os.path.exists(self.target):
-            self.execute(os.unlink,(self.target,),"Removing "+self.target)
+            self.execute(os.unlink, (self.target,), "Removing " + self.target)
         if not self.dry_run:
             pkg_resources.ensure_directory(self.target)
-        self.execute(self.copytree, (),
-            "Copying %s to %s" % (self.source, self.target)
+        self.execute(
+            self.copytree, (), "Copying %s to %s" % (self.source, self.target)
         )
         self.install_namespaces()
 
@@ -44,82 +47,70 @@ class install_egg_info(Command):
 
     def copytree(self):
         # Copy the .egg-info tree to site-packages
-        def skimmer(src,dst):
+        def skimmer(src, dst):
             # filter out source-control directories; note that 'src' is always
             # a '/'-separated path, regardless of platform.  'dst' is a
             # platform-specific path.
-            for skip in '.svn/','CVS/':
-                if src.startswith(skip) or '/'+skip in src:
+            for skip in '.svn/', 'CVS/':
+                if src.startswith(skip) or '/' + skip in src:
                     return None
             self.outputs.append(dst)
             log.debug("Copying %s to %s", src, dst)
             return dst
+
         unpack_archive(self.source, self.target, skimmer)
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
     def install_namespaces(self):
         nsp = self._get_all_ns_packages()
-        if not nsp: return
-        filename,ext = os.path.splitext(self.target)
-        filename += '-nspkg.pth'; self.outputs.append(filename)
-        log.info("Installing %s",filename)
-        if not self.dry_run:
-            f = open(filename,'wt')
-            for pkg in nsp:
-                # ensure pkg is not a unicode string under Python 2.7
-                pkg = str(pkg)
-                pth = tuple(pkg.split('.'))
-                trailer = '\n'
-                if '.' in pkg:
-                    trailer = (
-                        "; m and setattr(sys.modules[%r], %r, m)\n"
-                        % ('.'.join(pth[:-1]), pth[-1])
-                    )
-                f.write(
-                    "import sys,types,os; "
-                    "p = os.path.join(sys._getframe(1).f_locals['sitedir'], "
-                        "*%(pth)r); "
-                    "ie = os.path.exists(os.path.join(p,'__init__.py')); "
-                    "m = not ie and "
-                        "sys.modules.setdefault(%(pkg)r,types.ModuleType(%(pkg)r)); "
-                    "mp = (m or []) and m.__dict__.setdefault('__path__',[]); "
-                    "(p not in mp) and mp.append(p)%(trailer)s"
-                    % locals()
-                )
-            f.close()
+        if not nsp:
+            return
+        filename, ext = os.path.splitext(self.target)
+        filename += '-nspkg.pth'
+        self.outputs.append(filename)
+        log.info("Installing %s", filename)
+        lines = map(self._gen_nspkg_line, nsp)
+
+        if self.dry_run:
+            # always generate the lines, even in dry run
+            list(lines)
+            return
+
+        with open(filename, 'wt') as f:
+            f.writelines(lines)
+
+    _nspkg_tmpl = (
+        "import sys, types, os",
+        "p = os.path.join(sys._getframe(1).f_locals['sitedir'], *%(pth)r)",
+        "ie = os.path.exists(os.path.join(p,'__init__.py'))",
+        "m = not ie and "
+            "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))",
+        "mp = (m or []) and m.__dict__.setdefault('__path__',[])",
+        "(p not in mp) and mp.append(p)",
+    )
+    "lines for the namespace installer"
+
+    _nspkg_tmpl_multi = (
+        'm and setattr(sys.modules[%(parent)r], %(child)r, m)',
+    )
+    "additional line(s) when a parent package is indicated"
+
+    @classmethod
+    def _gen_nspkg_line(cls, pkg):
+        # ensure pkg is not a unicode string under Python 2.7
+        pkg = str(pkg)
+        pth = tuple(pkg.split('.'))
+        tmpl_lines = cls._nspkg_tmpl
+        parent, sep, child = pkg.rpartition('.')
+        if parent:
+            tmpl_lines += cls._nspkg_tmpl_multi
+        return ';'.join(tmpl_lines) % locals() + '\n'
 
     def _get_all_ns_packages(self):
-        nsp = {}
+        """Return sorted list of all package namespaces"""
+        nsp = set()
         for pkg in self.distribution.namespace_packages or []:
             pkg = pkg.split('.')
             while pkg:
-                nsp['.'.join(pkg)] = 1
+                nsp.add('.'.join(pkg))
                 pkg.pop()
-        nsp=list(nsp)
-        nsp.sort()  # set up shorter names first
-        return nsp
-
-
+        return sorted(nsp)
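Note on the new namespace installer: the _gen_nspkg_line() classmethod above joins the _nspkg_tmpl fragments into one line of the generated <egg>-nspkg.pth file; site.py executes each such line while scanning site-packages, and the template relies on 'sitedir' being a local variable in the frame that runs the .pth line. A minimal standalone sketch of that joining step (package names are hypothetical; this mimics the vendored code rather than importing it):

NSPKG_TMPL = (
    "import sys, types, os",
    "p = os.path.join(sys._getframe(1).f_locals['sitedir'], *%(pth)r)",
    "ie = os.path.exists(os.path.join(p,'__init__.py'))",
    "m = not ie and sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))",
    "mp = (m or []) and m.__dict__.setdefault('__path__',[])",
    "(p not in mp) and mp.append(p)",
)
NSPKG_TMPL_MULTI = ("m and setattr(sys.modules[%(parent)r], %(child)r, m)",)

def gen_nspkg_line(pkg):
    # same joining as _gen_nspkg_line(): one ';'-separated statement per package
    pth = tuple(pkg.split('.'))
    parent, _, child = pkg.rpartition('.')
    tmpl = NSPKG_TMPL + (NSPKG_TMPL_MULTI if parent else ())
    return ';'.join(tmpl) % locals() + '\n'

print(gen_nspkg_line('zope'))            # top-level namespace: no setattr tail
print(gen_nspkg_line('zope.interface'))  # nested: parent package gets the attribute set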
diff --git a/awx/lib/site-packages/setuptools/command/install_lib.py b/awx/lib/site-packages/setuptools/command/install_lib.py
index 82afa1421b..9b7722276b 100644
--- a/awx/lib/site-packages/setuptools/command/install_lib.py
+++ b/awx/lib/site-packages/setuptools/command/install_lib.py
@@ -1,21 +1,11 @@
-from distutils.command.install_lib import install_lib as _install_lib
 import os
+import imp
+from itertools import product, starmap
+import distutils.command.install_lib as orig
 
-class install_lib(_install_lib):
+class install_lib(orig.install_lib):
     """Don't add compiled flags to filenames of non-Python files"""
 
-    def _bytecode_filenames (self, py_filenames):
-        bytecode_files = []
-        for py_file in py_filenames:
-            if not py_file.endswith('.py'):
-                continue
-            if self.compile:
-                bytecode_files.append(py_file + "c")
-            if self.optimize > 0:
-                bytecode_files.append(py_file + "o")
-
-        return bytecode_files
-
     def run(self):
         self.build()
         outfiles = self.install()
@@ -24,30 +14,81 @@ class install_lib(_install_lib):
             self.byte_compile(outfiles)
 
     def get_exclusions(self):
-        exclude = {}
-        nsp = self.distribution.namespace_packages
+        """
+        Return a collections.Sized collections.Container of paths to be
+        excluded for single_version_externally_managed installations.
+        """
+        all_packages = (
+            pkg
+            for ns_pkg in self._get_SVEM_NSPs()
+            for pkg in self._all_packages(ns_pkg)
+        )
 
-        if (nsp and self.get_finalized_command('install')
-               .single_version_externally_managed
-        ):
-            for pkg in nsp:
-                parts = pkg.split('.')
-                while parts:
-                    pkgdir = os.path.join(self.install_dir, *parts)
-                    for f in '__init__.py', '__init__.pyc', '__init__.pyo':
-                        exclude[os.path.join(pkgdir,f)] = 1
-                    parts.pop()
-        return exclude
+        excl_specs = product(all_packages, self._gen_exclusion_paths())
+        return set(starmap(self._exclude_pkg_path, excl_specs))
+
+    def _exclude_pkg_path(self, pkg, exclusion_path):
+        """
+        Given a package name and exclusion path within that package,
+        compute the full exclusion path.
+        """
+        parts = pkg.split('.') + [exclusion_path]
+        return os.path.join(self.install_dir, *parts)
+
+    @staticmethod
+    def _all_packages(pkg_name):
+        """
+        >>> list(install_lib._all_packages('foo.bar.baz'))
+        ['foo.bar.baz', 'foo.bar', 'foo']
+        """
+        while pkg_name:
+            yield pkg_name
+            pkg_name, sep, child = pkg_name.rpartition('.')
+
+    def _get_SVEM_NSPs(self):
+        """
+        Get namespace packages (list), but only for
+        single_version_externally_managed installations; return an empty
+        list otherwise.
+        """
+        # TODO: is it necessary to short-circuit here? i.e. what's the cost
+        # if get_finalized_command is called even when namespace_packages is
+        # False?
+        if not self.distribution.namespace_packages:
+            return []
+
+        install_cmd = self.get_finalized_command('install')
+        svem = install_cmd.single_version_externally_managed
+
+        return self.distribution.namespace_packages if svem else []
+
+    @staticmethod
+    def _gen_exclusion_paths():
+        """
+        Generate file paths to be excluded for namespace packages (bytecode
+        cache files).
+        """
+        # always exclude the package module itself
+        yield '__init__.py'
+
+        yield '__init__.pyc'
+        yield '__init__.pyo'
+
+        if not hasattr(imp, 'get_tag'):
+            return
+
+        base = os.path.join('__pycache__', '__init__.' + imp.get_tag())
+        yield base + '.pyc'
+        yield base + '.pyo'
 
     def copy_tree(
-        self, infile, outfile,
-        preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
+            self, infile, outfile,
+            preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
     ):
         assert preserve_mode and preserve_times and not preserve_symlinks
         exclude = self.get_exclusions()
 
         if not exclude:
-            return _install_lib.copy_tree(self, infile, outfile)
+            return orig.install_lib.copy_tree(self, infile, outfile)
 
         # Exclude namespace package __init__.py* files from the output
 
@@ -58,7 +99,8 @@ class install_lib(_install_lib):
 
         def pf(src, dst):
             if dst in exclude:
-                log.warn("Skipping installation of %s (namespace package)",dst)
+                log.warn("Skipping installation of %s (namespace package)",
+                         dst)
                 return False
 
             log.info("copying %s -> %s", src, os.path.dirname(dst))
@@ -69,14 +111,8 @@ class install_lib(_install_lib):
         return outfiles
 
     def get_outputs(self):
-        outputs = _install_lib.get_outputs(self)
+        outputs = orig.install_lib.get_outputs(self)
         exclude = self.get_exclusions()
         if exclude:
             return [f for f in outputs if f not in exclude]
         return outputs
-
-
-
-
-
-
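For single_version_externally_managed installs, get_exclusions() above crosses every namespace package (plus each of its parents, via _all_packages()) with every candidate __init__ artifact from _gen_exclusion_paths() and joins each pair under the install directory. A hedged, self-contained sketch of that computation; the install directory and namespace package are made up, and the __pycache__ variants are omitted for brevity:

import os
from itertools import product, starmap

install_dir = '/usr/lib/python2.7/site-packages'   # hypothetical
ns_packages = ['foo.bar']                          # hypothetical SVEM namespace packages

def all_packages(pkg_name):
    # 'foo.bar' -> 'foo.bar', 'foo' (mirrors _all_packages above)
    while pkg_name:
        yield pkg_name
        pkg_name, _, _ = pkg_name.rpartition('.')

def exclusion_paths():
    # mirrors _gen_exclusion_paths(), minus the imp.get_tag() variants
    yield '__init__.py'
    yield '__init__.pyc'
    yield '__init__.pyo'

def exclude_pkg_path(pkg, exclusion_path):
    return os.path.join(install_dir, *(pkg.split('.') + [exclusion_path]))

all_pkgs = (pkg for ns_pkg in ns_packages for pkg in all_packages(ns_pkg))
exclusions = set(starmap(exclude_pkg_path, product(all_pkgs, exclusion_paths())))
for path in sorted(exclusions):
    print(path)   # .../foo/__init__.py*, .../foo/bar/__init__.py*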
diff --git a/awx/lib/site-packages/setuptools/command/install_scripts.py b/awx/lib/site-packages/setuptools/command/install_scripts.py
index 105dabca6a..be66cb2252 100644
--- a/awx/lib/site-packages/setuptools/command/install_scripts.py
+++ b/awx/lib/site-packages/setuptools/command/install_scripts.py
@@ -1,23 +1,23 @@
-from distutils.command.install_scripts import install_scripts \
-     as _install_scripts
-from pkg_resources import Distribution, PathMetadata, ensure_directory
-import os
 from distutils import log
+import distutils.command.install_scripts as orig
+import os
 
-class install_scripts(_install_scripts):
+from pkg_resources import Distribution, PathMetadata, ensure_directory
+
+
+class install_scripts(orig.install_scripts):
     """Do normal script install, plus any egg_info wrapper scripts"""
 
     def initialize_options(self):
-        _install_scripts.initialize_options(self)
+        orig.install_scripts.initialize_options(self)
         self.no_ep = False
 
     def run(self):
-        from setuptools.command.easy_install import get_script_args
-        from setuptools.command.easy_install import sys_executable
+        import setuptools.command.easy_install as ei
 
         self.run_command("egg_info")
         if self.distribution.scripts:
-            _install_scripts.run(self)  # run first to set up self.outfiles
+            orig.install_scripts.run(self)  # run first to set up self.outfiles
         else:
             self.outfiles = []
         if self.no_ep:
@@ -30,16 +30,23 @@ class install_scripts(_install_scripts):
             ei_cmd.egg_name, ei_cmd.egg_version,
         )
         bs_cmd = self.get_finalized_command('build_scripts')
-        executable = getattr(bs_cmd,'executable',sys_executable)
-        is_wininst = getattr(
-            self.get_finalized_command("bdist_wininst"), '_is_running', False
-        )
-        for args in get_script_args(dist, executable, is_wininst):
+        exec_param = getattr(bs_cmd, 'executable', None)
+        bw_cmd = self.get_finalized_command("bdist_wininst")
+        is_wininst = getattr(bw_cmd, '_is_running', False)
+        writer = ei.ScriptWriter
+        if is_wininst:
+            exec_param = "python.exe"
+            writer = ei.WindowsScriptWriter
+        # resolve the writer to the environment
+        writer = writer.best()
+        cmd = writer.command_spec_class.best().from_param(exec_param)
+        for args in writer.get_args(dist, cmd.as_header()):
             self.write_script(*args)
 
     def write_script(self, script_name, contents, mode="t", *ignored):
         """Write an executable file to the scripts directory"""
         from setuptools.command.easy_install import chmod, current_umask
+
         log.info("Installing %s script to %s", script_name, self.install_dir)
         target = os.path.join(self.install_dir, script_name)
         self.outfiles.append(target)
@@ -47,8 +54,7 @@ class install_scripts(_install_scripts):
         mask = current_umask()
         if not self.dry_run:
             ensure_directory(target)
-            f = open(target,"w"+mode)
+            f = open(target, "w" + mode)
             f.write(contents)
             f.close()
-            chmod(target, 0x1FF-mask)  # 0777
-
+            chmod(target, 0o777 - mask)
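The rewritten run() above drops the old get_script_args/sys_executable helpers in favor of the ScriptWriter API: a writer is chosen for the environment, its command_spec_class builds the '#!' header (from the build_scripts executable, or python.exe under bdist_wininst), and get_args() yields the tuples that write_script() consumes. A hedged sketch of that flow outside the command class; the project name and .egg-info location are placeholders, and it assumes the setuptools 12.x tree from this diff is importable:

import setuptools.command.easy_install as ei
from pkg_resources import Distribution, PathMetadata

egg_base, egg_info = '.', 'example.egg-info'     # hypothetical metadata location
dist = Distribution(egg_base, PathMetadata(egg_base, egg_info), 'example', '1.0')

writer = ei.ScriptWriter.best()                  # environment-appropriate writer
spec_cls = writer.command_spec_class.best()      # how the interpreter line is built
header = spec_cls.from_param(None).as_header()   # None -> wrap the current interpreter
for args in writer.get_args(dist, header):       # same tuples write_script() receives
    script_name, contents = args[0], args[1]
    print(script_name)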
diff --git a/awx/lib/site-packages/setuptools/command/launcher manifest.xml b/awx/lib/site-packages/setuptools/command/launcher manifest.xml
index 844d2264cd..5972a96d8d 100644
--- a/awx/lib/site-packages/setuptools/command/launcher manifest.xml	
+++ b/awx/lib/site-packages/setuptools/command/launcher manifest.xml	
@@ -1,15 +1,15 @@
[launcher manifest.xml hunk: the XML element markup was lost in extraction; only whitespace/indentation changes remain visible]
diff --git a/awx/lib/site-packages/setuptools/command/register.py b/awx/lib/site-packages/setuptools/command/register.py
index 3b2e085907..8d6336a14d 100644
--- a/awx/lib/site-packages/setuptools/command/register.py
+++ b/awx/lib/site-packages/setuptools/command/register.py
@@ -1,10 +1,10 @@
-from distutils.command.register import register as _register
+import distutils.command.register as orig
 
-class register(_register):
-    __doc__ = _register.__doc__
+
+class register(orig.register):
+    __doc__ = orig.register.__doc__
 
     def run(self):
         # Make sure that we are using valid current name/version info
         self.run_command('egg_info')
-        _register.run(self)
-
+        orig.register.run(self)
diff --git a/awx/lib/site-packages/setuptools/command/rotate.py b/awx/lib/site-packages/setuptools/command/rotate.py
index b10acfb41f..1b073620ea 100644
--- a/awx/lib/site-packages/setuptools/command/rotate.py
+++ b/awx/lib/site-packages/setuptools/command/rotate.py
@@ -1,18 +1,20 @@
-import distutils, os
-from setuptools import Command
-from setuptools.compat import basestring
 from distutils.util import convert_path
 from distutils import log
-from distutils.errors import *
+from distutils.errors import DistutilsOptionError
+import os
+
+from setuptools import Command
+from setuptools.compat import basestring
+
 
 class rotate(Command):
     """Delete older distributions"""
 
     description = "delete older distributions, keeping N newest files"
     user_options = [
-        ('match=',    'm', "patterns to match (required)"),
+        ('match=', 'm', "patterns to match (required)"),
         ('dist-dir=', 'd', "directory where the distributions are"),
-        ('keep=',     'k', "number of matching distributions to keep"),
+        ('keep=', 'k', "number of matching distributions to keep"),
     ]
 
     boolean_options = []
@@ -29,7 +31,7 @@ class rotate(Command):
                 "(e.g. '.zip' or '.egg')"
             )
         if self.keep is None:
-            raise DistutilsOptionError("Must specify number of files to keep")           
+            raise DistutilsOptionError("Must specify number of files to keep")
         try:
             self.keep = int(self.keep)
         except ValueError:
@@ -38,46 +40,22 @@ class rotate(Command):
             self.match = [
                 convert_path(p.strip()) for p in self.match.split(',')
             ]
-        self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
+        self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
 
     def run(self):
         self.run_command("egg_info")
         from glob import glob
+
         for pattern in self.match:
-            pattern = self.distribution.get_name()+'*'+pattern
-            files = glob(os.path.join(self.dist_dir,pattern))
-            files = [(os.path.getmtime(f),f) for f in files]
+            pattern = self.distribution.get_name() + '*' + pattern
+            files = glob(os.path.join(self.dist_dir, pattern))
+            files = [(os.path.getmtime(f), f) for f in files]
             files.sort()
             files.reverse()
 
             log.info("%d file(s) matching %s", len(files), pattern)
             files = files[self.keep:]
-            for (t,f) in files:
+            for (t, f) in files:
                 log.info("Deleting %s", f)
                 if not self.dry_run:
                     os.unlink(f)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/awx/lib/site-packages/setuptools/command/saveopts.py b/awx/lib/site-packages/setuptools/command/saveopts.py
index 7209be4cd9..611cec5528 100644
--- a/awx/lib/site-packages/setuptools/command/saveopts.py
+++ b/awx/lib/site-packages/setuptools/command/saveopts.py
@@ -1,7 +1,6 @@
-import distutils, os
-from setuptools import Command
 from setuptools.command.setopt import edit_config, option_base
 
+
 class saveopts(option_base):
     """Save command-line options to a file"""
 
@@ -13,12 +12,11 @@ class saveopts(option_base):
 
         for cmd in dist.command_options:
 
-            if cmd=='saveopts':
-                continue    # don't save our own options!
+            if cmd == 'saveopts':
+                continue  # don't save our own options!
 
-            for opt,(src,val) in dist.get_option_dict(cmd).items():
-                if src=="command line":
-                    settings.setdefault(cmd,{})[opt] = val
+            for opt, (src, val) in dist.get_option_dict(cmd).items():
+                if src == "command line":
+                    settings.setdefault(cmd, {})[opt] = val
 
         edit_config(self.filename, settings, self.dry_run)
-
diff --git a/awx/lib/site-packages/setuptools/command/sdist.py b/awx/lib/site-packages/setuptools/command/sdist.py
index 76e1c5f18b..851a177524 100644
--- a/awx/lib/site-packages/setuptools/command/sdist.py
+++ b/awx/lib/site-packages/setuptools/command/sdist.py
@@ -1,16 +1,17 @@
-import os
-import re
-import sys
 from glob import glob
+from distutils import log
+import distutils.command.sdist as orig
+import os
+import sys
+
+from setuptools.compat import PY3
+from setuptools.utils import cs_path_exists
 
 import pkg_resources
-from distutils.command.sdist import sdist as _sdist
-from distutils.util import convert_path
-from distutils import log
-from setuptools import svn_utils
 
-READMES = ('README', 'README.rst', 'README.txt')
+READMES = 'README', 'README.rst', 'README.txt'
 
+_default_revctrl = list
 
 def walk_revctrl(dirname=''):
     """Find all files under revision control"""
@@ -19,60 +20,7 @@ def walk_revctrl(dirname=''):
             yield item
 
 
-#TODO will need test case
-class re_finder(object):
-    """
-    Finder that locates files based on entries in a file matched by a
-    regular expression.
-    """
-
-    def __init__(self, path, pattern, postproc=lambda x: x):
-        self.pattern = pattern
-        self.postproc = postproc
-        self.entries_path = convert_path(path)
-
-    def _finder(self, dirname, filename):
-        f = open(filename,'rU')
-        try:
-            data = f.read()
-        finally:
-            f.close()
-        for match in self.pattern.finditer(data):
-            path = match.group(1)
-            # postproc was formerly used when the svn finder
-            # was an re_finder for calling unescape
-            path = self.postproc(path)
-            yield svn_utils.joinpath(dirname, path)
-
-    def find(self, dirname=''):
-        path = svn_utils.joinpath(dirname, self.entries_path)
-
-        if not os.path.isfile(path):
-            # entries file doesn't exist
-            return
-        for path in self._finder(dirname,path):
-            if os.path.isfile(path):
-                yield path
-            elif os.path.isdir(path):
-                for item in self.find(path):
-                    yield item
-    __call__ = find
-
-
-def _default_revctrl(dirname=''):
-    'Primary svn_cvs entry point'
-    for finder in finders:
-        for item in finder(dirname):
-            yield item
-
-
-finders = [
-    re_finder('CVS/Entries', re.compile(r"^\w?/([^/]+)/", re.M)),
-    svn_utils.svn_finder,
-]
-
-
-class sdist(_sdist):
+class sdist(orig.sdist):
     """Smart sdist that finds anything supported by revision control"""
 
     user_options = [
@@ -84,7 +32,7 @@ class sdist(_sdist):
         ('dist-dir=', 'd',
          "directory to put the source distribution archive(s) in "
          "[default: dist]"),
-        ]
+    ]
 
     negative_opt = {}
 
@@ -92,7 +40,7 @@ class sdist(_sdist):
         self.run_command('egg_info')
         ei_cmd = self.get_finalized_command('egg_info')
         self.filelist = ei_cmd.filelist
-        self.filelist.append(os.path.join(ei_cmd.egg_info,'SOURCES.txt'))
+        self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt'))
         self.check_readme()
 
         # Run sub commands
@@ -102,12 +50,13 @@ class sdist(_sdist):
         # Call check_metadata only if no 'check' command
         # (distutils <= 2.6)
         import distutils.command
+
         if 'check' not in distutils.command.__all__:
             self.check_metadata()
 
         self.make_distribution()
 
-        dist_files = getattr(self.distribution,'dist_files',[])
+        dist_files = getattr(self.distribution, 'dist_files', [])
         for file in self.archive_files:
             data = ('sdist', '', file)
             if data not in dist_files:
@@ -119,17 +68,19 @@ class sdist(_sdist):
         # Doing so prevents an error when easy_install attempts to delete the
         #  file.
         try:
-            _sdist.read_template(self)
+            orig.sdist.read_template(self)
         except:
-            sys.exc_info()[2].tb_next.tb_frame.f_locals['template'].close()
+            _, _, tb = sys.exc_info()
+            tb.tb_next.tb_frame.f_locals['template'].close()
             raise
+
     # Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle
     #  has been fixed, so only override the method if we're using an earlier
     #  Python.
     has_leaky_handle = (
-        sys.version_info < (2,7,2)
-        or (3,0) <= sys.version_info < (3,1,4)
-        or (3,2) <= sys.version_info < (3,2,1)
+        sys.version_info < (2, 7, 2)
+        or (3, 0) <= sys.version_info < (3, 1, 4)
+        or (3, 2) <= sys.version_info < (3, 2, 1)
     )
     if has_leaky_handle:
         read_template = __read_template_hack
@@ -142,7 +93,7 @@ class sdist(_sdist):
                 alts = fn
                 got_it = 0
                 for fn in alts:
-                    if os.path.exists(fn):
+                    if cs_path_exists(fn):
                         got_it = 1
                         self.filelist.append(fn)
                         break
@@ -151,14 +102,14 @@ class sdist(_sdist):
                     self.warn("standard file not found: should have one of " +
                               ', '.join(alts))
             else:
-                if os.path.exists(fn):
+                if cs_path_exists(fn):
                     self.filelist.append(fn)
                 else:
                     self.warn("standard file '%s' not found" % fn)
 
         optional = ['test/test*.py', 'setup.cfg']
         for pattern in optional:
-            files = list(filter(os.path.isfile, glob(pattern)))
+            files = list(filter(cs_path_exists, glob(pattern)))
             if files:
                 self.filelist.extend(files)
 
@@ -193,15 +144,16 @@ class sdist(_sdist):
                 return
         else:
             self.warn(
-                "standard file not found: should have one of " +', '.join(READMES)
+                "standard file not found: should have one of " +
+                ', '.join(READMES)
             )
 
     def make_release_tree(self, base_dir, files):
-        _sdist.make_release_tree(self, base_dir, files)
+        orig.sdist.make_release_tree(self, base_dir, files)
 
         # Save any egg_info command line options used to create this sdist
         dest = os.path.join(base_dir, 'setup.cfg')
-        if hasattr(os,'link') and os.path.exists(dest):
+        if hasattr(os, 'link') and os.path.exists(dest):
             # unlink and re-copy, since it might be hard-linked, and
             # we don't want to change the source version
             os.unlink(dest)
@@ -219,7 +171,8 @@ class sdist(_sdist):
             first_line = fp.readline()
         finally:
             fp.close()
-        return first_line != '# file GENERATED by distutils, do NOT edit\n'.encode()
+        return (first_line !=
+                '# file GENERATED by distutils, do NOT edit\n'.encode())
 
     def read_manifest(self):
         """Read the manifest file (named by 'self.manifest') and use it to
@@ -230,7 +183,7 @@ class sdist(_sdist):
         manifest = open(self.manifest, 'rbU')
         for line in manifest:
             # The manifest must contain UTF-8. See #303.
-            if sys.version_info >= (3,):
+            if PY3:
                 try:
                     line = line.decode('UTF-8')
                 except UnicodeDecodeError:
diff --git a/awx/lib/site-packages/setuptools/command/setopt.py b/awx/lib/site-packages/setuptools/command/setopt.py
index aa468c88fe..a04d6032ad 100644
--- a/awx/lib/site-packages/setuptools/command/setopt.py
+++ b/awx/lib/site-packages/setuptools/command/setopt.py
@@ -1,8 +1,11 @@
-import distutils, os
-from setuptools import Command
 from distutils.util import convert_path
 from distutils import log
-from distutils.errors import *
+from distutils.errors import DistutilsOptionError
+import distutils
+import os
+
+from setuptools import Command
+
 
 __all__ = ['config_file', 'edit_config', 'option_base', 'setopt']
 
@@ -12,33 +15,20 @@ def config_file(kind="local"):
 
     `kind` must be one of "local", "global", or "user"
     """
-    if kind=='local':
+    if kind == 'local':
         return 'setup.cfg'
-    if kind=='global':
+    if kind == 'global':
         return os.path.join(
-            os.path.dirname(distutils.__file__),'distutils.cfg'
+            os.path.dirname(distutils.__file__), 'distutils.cfg'
         )
-    if kind=='user':
-        dot = os.name=='posix' and '.' or ''
+    if kind == 'user':
+        dot = os.name == 'posix' and '.' or ''
         return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
     raise ValueError(
         "config_file() type must be 'local', 'global', or 'user'", kind
     )
 
 
-
-
-
-
-
-
-
-
-
-
-
-
-
 def edit_config(filename, settings, dry_run=False):
     """Edit a configuration file to include `settings`
 
@@ -48,6 +38,7 @@ def edit_config(filename, settings, dry_run=False):
     A setting of ``None`` means to delete that setting.
     """
     from setuptools.compat import ConfigParser
+
     log.debug("Reading configuration from %s", filename)
     opts = ConfigParser.RawConfigParser()
     opts.read([filename])
@@ -59,46 +50,49 @@ def edit_config(filename, settings, dry_run=False):
             if not opts.has_section(section):
                 log.debug("Adding new section [%s] to %s", section, filename)
                 opts.add_section(section)
-            for option,value in options.items():
+            for option, value in options.items():
                 if value is None:
-                    log.debug("Deleting %s.%s from %s",
+                    log.debug(
+                        "Deleting %s.%s from %s",
                         section, option, filename
                     )
-                    opts.remove_option(section,option)
+                    opts.remove_option(section, option)
                     if not opts.options(section):
                         log.info("Deleting empty [%s] section from %s",
-                                  section, filename)
+                                 section, filename)
                         opts.remove_section(section)
                 else:
                     log.debug(
                         "Setting %s.%s to %r in %s",
                         section, option, value, filename
                     )
-                    opts.set(section,option,value)
+                    opts.set(section, option, value)
 
     log.info("Writing %s", filename)
     if not dry_run:
-        f = open(filename,'w'); opts.write(f); f.close()
+        with open(filename, 'w') as f:
+            opts.write(f)
+
 
 class option_base(Command):
     """Abstract base class for commands that mess with config files"""
-    
+
     user_options = [
         ('global-config', 'g',
-                 "save options to the site-wide distutils.cfg file"),
+         "save options to the site-wide distutils.cfg file"),
         ('user-config', 'u',
-                 "save options to the current user's pydistutils.cfg file"),
+         "save options to the current user's pydistutils.cfg file"),
         ('filename=', 'f',
-                 "configuration file to use (default=setup.cfg)"),
+         "configuration file to use (default=setup.cfg)"),
     ]
 
     boolean_options = [
         'global-config', 'user-config',
-    ]    
+    ]
 
     def initialize_options(self):
         self.global_config = None
-        self.user_config   = None
+        self.user_config = None
         self.filename = None
 
     def finalize_options(self):
@@ -111,14 +105,12 @@ class option_base(Command):
             filenames.append(self.filename)
         if not filenames:
             filenames.append(config_file('local'))
-        if len(filenames)>1:
+        if len(filenames) > 1:
             raise DistutilsOptionError(
                 "Must specify only one configuration file option",
                 filenames
             )
-        self.filename, = filenames    
-
-
+        self.filename, = filenames
 
 
 class setopt(option_base):
@@ -128,9 +120,9 @@ class setopt(option_base):
 
     user_options = [
         ('command=', 'c', 'command to set an option for'),
-        ('option=',  'o',  'option to set'),
-        ('set-value=',   's', 'value of the option'),
-        ('remove',   'r', 'remove (unset) the value'), 
+        ('option=', 'o', 'option to set'),
+        ('set-value=', 's', 'value of the option'),
+        ('remove', 'r', 'remove (unset) the value'),
     ] + option_base.user_options
 
     boolean_options = option_base.boolean_options + ['remove']
@@ -152,13 +144,7 @@ class setopt(option_base):
     def run(self):
         edit_config(
             self.filename, {
-                self.command: {self.option.replace('-','_'):self.set_value}
+                self.command: {self.option.replace('-', '_'): self.set_value}
             },
             self.dry_run
         )
-
-
-
-
-
-
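config_file() and edit_config() above form a small reusable API: the former maps 'local'/'global'/'user' to a configuration path, the latter merges a nested dict into that file, where a value of None deletes an option (and drops the section once it is empty). A hedged usage sketch; the section and option names are arbitrary examples, and dry_run=True keeps it from writing setup.cfg:

from setuptools.command.setopt import config_file, edit_config

print(config_file('local'))    # -> 'setup.cfg'
print(config_file('user'))     # -> expanded ~/.pydistutils.cfg (the dot is posix-only)

edit_config('setup.cfg', {
    'bdist_wheel': {'universal': '1'},   # set [bdist_wheel] universal = 1
    'aliases': {'old_alias': None},      # delete the option if present
}, dry_run=True)                         # log what would change, write nothing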
diff --git a/awx/lib/site-packages/setuptools/command/test.py b/awx/lib/site-packages/setuptools/command/test.py
index db2fc7b140..42689f7012 100644
--- a/awx/lib/site-packages/setuptools/command/test.py
+++ b/awx/lib/site-packages/setuptools/command/test.py
@@ -1,12 +1,17 @@
-from setuptools import Command
 from distutils.errors import DistutilsOptionError
+from unittest import TestLoader
+import unittest
 import sys
-from pkg_resources import *
-from pkg_resources import _namespace_packages
-from unittest import TestLoader, main
+
+from pkg_resources import (resource_listdir, resource_exists, normalize_path,
+                           working_set, _namespace_packages,
+                           add_activation_listener, require, EntryPoint)
+from setuptools import Command
+from setuptools.compat import PY3
+from setuptools.py31compat import unittest_main
+
 
 class ScanningLoader(TestLoader):
-
     def loadTestsFromModule(self, module):
         """Return a suite of all tests cases contained in the given module
 
@@ -15,48 +20,45 @@ class ScanningLoader(TestLoader):
         the return value to the tests.
         """
         tests = []
-        if module.__name__!='setuptools.tests.doctest':  # ugh
-            tests.append(TestLoader.loadTestsFromModule(self,module))
+        tests.append(TestLoader.loadTestsFromModule(self, module))
 
         if hasattr(module, "additional_tests"):
             tests.append(module.additional_tests())
 
         if hasattr(module, '__path__'):
             for file in resource_listdir(module.__name__, ''):
-                if file.endswith('.py') and file!='__init__.py':
-                    submodule = module.__name__+'.'+file[:-3]
+                if file.endswith('.py') and file != '__init__.py':
+                    submodule = module.__name__ + '.' + file[:-3]
                 else:
-                    if resource_exists(
-                        module.__name__, file+'/__init__.py'
-                    ):
-                        submodule = module.__name__+'.'+file
+                    if resource_exists(module.__name__, file + '/__init__.py'):
+                        submodule = module.__name__ + '.' + file
                     else:
                         continue
                 tests.append(self.loadTestsFromName(submodule))
 
-        if len(tests)!=1:
+        if len(tests) != 1:
             return self.suiteClass(tests)
         else:
-            return tests[0] # don't create a nested suite for only one return
+            return tests[0]  # don't create a nested suite for only one return
 
 
 class test(Command):
-
     """Command to run unit tests after in-place build"""
 
     description = "run unit tests after in-place build"
 
     user_options = [
-        ('test-module=','m', "Run 'test_suite' in specified module"),
-        ('test-suite=','s',
-            "Test suite to run (e.g. 'some_module.test_suite')"),
+        ('test-module=', 'm', "Run 'test_suite' in specified module"),
+        ('test-suite=', 's',
+         "Test suite to run (e.g. 'some_module.test_suite')"),
+        ('test-runner=', 'r', "Test runner to use"),
     ]
 
     def initialize_options(self):
         self.test_suite = None
         self.test_module = None
         self.test_loader = None
-
+        self.test_runner = None
 
     def finalize_options(self):
 
@@ -64,7 +66,7 @@ class test(Command):
             if self.test_module is None:
                 self.test_suite = self.distribution.test_suite
             else:
-                self.test_suite = self.test_module+".test_suite"
+                self.test_suite = self.test_module + ".test_suite"
         elif self.test_module:
             raise DistutilsOptionError(
                 "You may specify a module or a suite, but not both"
@@ -73,16 +75,18 @@ class test(Command):
         self.test_args = [self.test_suite]
 
         if self.verbose:
-            self.test_args.insert(0,'--verbose')
+            self.test_args.insert(0, '--verbose')
         if self.test_loader is None:
-            self.test_loader = getattr(self.distribution,'test_loader',None)
+            self.test_loader = getattr(self.distribution, 'test_loader', None)
         if self.test_loader is None:
             self.test_loader = "setuptools.command.test:ScanningLoader"
-
-
+        if self.test_runner is None:
+            self.test_runner = getattr(self.distribution, 'test_runner', None)
 
     def with_project_on_sys_path(self, func):
-        if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False):
+        with_2to3 = PY3 and getattr(self.distribution, 'use_2to3', False)
+
+        if with_2to3:
             # If we run 2to3 we can not do this inplace:
 
             # Ensure metadata is up-to-date
@@ -122,10 +126,10 @@ class test(Command):
             sys.modules.update(old_modules)
             working_set.__init__()
 
-
     def run(self):
         if self.distribution.install_requires:
-            self.distribution.fetch_build_eggs(self.distribution.install_requires)
+            self.distribution.fetch_build_eggs(
+                self.distribution.install_requires)
         if self.distribution.tests_require:
             self.distribution.fetch_build_eggs(self.distribution.tests_require)
 
@@ -137,14 +141,11 @@ class test(Command):
                 self.announce('running "unittest %s"' % cmd)
                 self.with_project_on_sys_path(self.run_tests)
 
-
     def run_tests(self):
-        import unittest
-
         # Purge modules under test from sys.modules. The test loader will
         # re-import them from the build location. Required when 2to3 is used
         # with namespace packages.
-        if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False):
+        if PY3 and getattr(self.distribution, 'use_2to3', False):
             module = self.test_args[-1].split('.')[0]
             if module in _namespace_packages:
                 del_modules = []
@@ -156,43 +157,19 @@ class test(Command):
                         del_modules.append(name)
                 list(map(sys.modules.__delitem__, del_modules))
 
-        loader_ep = EntryPoint.parse("x="+self.test_loader)
-        loader_class = loader_ep.load(require=False)
-        cks = loader_class()
-        unittest.main(
-            None, None, [unittest.__file__]+self.test_args,
-            testLoader = cks
+        unittest_main(
+            None, None, [unittest.__file__] + self.test_args,
+            testLoader=self._resolve_as_ep(self.test_loader),
+            testRunner=self._resolve_as_ep(self.test_runner),
         )
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    @staticmethod
+    def _resolve_as_ep(val):
+        """
+        Load the indicated attribute value, called, as if it were
+        specified as an entry point.
+        """
+        if val is None:
+            return
+        parsed = EntryPoint.parse("x=" + val)
+        return parsed.resolve()()
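_resolve_as_ep() above turns the --test-loader/--test-runner strings into live objects by parsing them as entry points, resolving the attribute, and calling it. A hedged sketch using the default loader value; it assumes a pkg_resources new enough to provide EntryPoint.resolve(), which the vendored copy in this diff is:

from pkg_resources import EntryPoint

val = "setuptools.command.test:ScanningLoader"   # default test_loader set above
parsed = EntryPoint.parse("x=" + val)            # dummy name 'x'; only the target matters
loader_cls = parsed.resolve()                    # import the module, fetch the attribute
loader = loader_cls()                            # instantiate, as _resolve_as_ep() does
print(type(loader).__name__)                     # -> 'ScanningLoader'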
diff --git a/awx/lib/site-packages/setuptools/command/upload_docs.py b/awx/lib/site-packages/setuptools/command/upload_docs.py
index cad7a52d1b..001ee936e4 100644
--- a/awx/lib/site-packages/setuptools/command/upload_docs.py
+++ b/awx/lib/site-packages/setuptools/command/upload_docs.py
@@ -5,6 +5,10 @@ Implements a Distutils 'upload_docs' subcommand (upload documentation to
 PyPI's pythonhosted.org).
 """
 
+from base64 import standard_b64encode
+from distutils import log
+from distutils.errors import DistutilsOptionError
+from distutils.command.upload import upload
 import os
 import socket
 import zipfile
@@ -12,14 +16,9 @@ import tempfile
 import sys
 import shutil
 
-from base64 import standard_b64encode
+from setuptools.compat import httplib, urlparse, unicode, iteritems, PY3
 from pkg_resources import iter_entry_points
 
-from distutils import log
-from distutils.errors import DistutilsOptionError
-from distutils.command.upload import upload
-
-from setuptools.compat import httplib, urlparse, unicode, iteritems, PY3
 
 errors = 'surrogateescape' if PY3 else 'strict'
 
@@ -33,7 +32,6 @@ def b(s, encoding='utf-8'):
 
 
 class upload_docs(upload):
-
     description = 'Upload documentation to PyPI'
 
     user_options = [
@@ -42,7 +40,7 @@ class upload_docs(upload):
         ('show-response', None,
          'display full response text from server'),
         ('upload-dir=', None, 'directory to upload'),
-        ]
+    ]
     boolean_options = upload.boolean_options
 
     def has_sphinx(self):
@@ -159,7 +157,7 @@ class upload_docs(upload):
         elif schema == 'https':
             conn = httplib.HTTPSConnection(netloc)
         else:
-            raise AssertionError("unsupported schema "+schema)
+            raise AssertionError("unsupported schema " + schema)
 
         data = ''
         try:
@@ -171,8 +169,7 @@ class upload_docs(upload):
             conn.putheader('Authorization', auth)
             conn.endheaders()
             conn.send(body)
-        except socket.error:
-            e = sys.exc_info()[1]
+        except socket.error as e:
             self.announce(str(e), log.ERROR)
             return
 
@@ -190,4 +187,4 @@ class upload_docs(upload):
             self.announce('Upload failed (%s): %s' % (r.status, r.reason),
                           log.ERROR)
         if self.show_response:
-            print('-'*75, r.read(), '-'*75)
+            print('-' * 75, r.read(), '-' * 75)
diff --git a/awx/lib/site-packages/setuptools/compat.py b/awx/lib/site-packages/setuptools/compat.py
index 7b824ba2ff..73e6e4aa7e 100644
--- a/awx/lib/site-packages/setuptools/compat.py
+++ b/awx/lib/site-packages/setuptools/compat.py
@@ -1,15 +1,15 @@
 import sys
 import itertools
 
-if sys.version_info[0] < 3:
-    PY3 = False
+PY3 = sys.version_info >= (3,)
+PY2 = not PY3
 
+if PY2:
     basestring = basestring
     import __builtin__ as builtins
     import ConfigParser
     from StringIO import StringIO
     BytesIO = StringIO
-    execfile = execfile
     func_code = lambda o: o.func_code
     func_globals = lambda o: o.func_globals
     im_func = lambda o: o.im_func
@@ -21,8 +21,6 @@ if sys.version_info[0] < 3:
     iteritems = lambda o: o.iteritems()
     long_type = long
     maxsize = sys.maxint
-    next = lambda o: o.next()
-    numeric_types = (int, long, float)
     unichr = unichr
     unicode = unicode
     bytes = str
@@ -34,9 +32,8 @@ if sys.version_info[0] < 3:
 
     exec("""def reraise(tp, value, tb=None):
     raise tp, value, tb""")
-else:
-    PY3 = True
 
+if PY3:
     basestring = str
     import builtins
     import configparser as ConfigParser
@@ -51,8 +48,6 @@ else:
     iteritems = lambda o: o.items()
     long_type = int
     maxsize = sys.maxsize
-    next = next
-    numeric_types = (int, float)
     unichr = chr
     unicode = str
     bytes = bytes
@@ -65,18 +60,6 @@ else:
     )
     filterfalse = itertools.filterfalse
 
-    def execfile(fn, globs=None, locs=None):
-        if globs is None:
-            globs = globals()
-        if locs is None:
-            locs = globs
-        f = open(fn, 'rb')
-        try:
-            source = f.read()
-        finally:
-            f.close()
-        exec(compile(source, fn, 'exec'), globs, locs)
-
     def reraise(tp, value, tb=None):
         if value.__traceback__ is not tb:
             raise value.with_traceback(tb)
diff --git a/awx/lib/site-packages/setuptools/depends.py b/awx/lib/site-packages/setuptools/depends.py
index 8b9d1217b1..e87ef3f39c 100644
--- a/awx/lib/site-packages/setuptools/depends.py
+++ b/awx/lib/site-packages/setuptools/depends.py
@@ -1,7 +1,9 @@
-from __future__ import generators
-import sys, imp, marshal
+import sys
+import imp
+import marshal
 from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN
-from distutils.version import StrictVersion, LooseVersion
+from distutils.version import StrictVersion
+from setuptools import compat
 
 __all__ = [
     'Require', 'find_module', 'get_module_constant', 'extract_constant'
@@ -10,9 +12,8 @@ __all__ = [
 class Require:
     """A prerequisite to building or installing a distribution"""
 
-    def __init__(self,name,requested_version,module,homepage='',
-        attribute=None,format=None
-    ):
+    def __init__(self, name, requested_version, module, homepage='',
+            attribute=None, format=None):
 
         if format is None and requested_version is not None:
             format = StrictVersion
@@ -25,20 +26,17 @@ class Require:
         self.__dict__.update(locals())
         del self.self
 
-
     def full_name(self):
         """Return full package/distribution name, w/version"""
         if self.requested_version is not None:
             return '%s-%s' % (self.name,self.requested_version)
         return self.name
 
-
-    def version_ok(self,version):
+    def version_ok(self, version):
         """Is 'version' sufficiently up-to-date?"""
         return self.attribute is None or self.format is None or \
             str(version) != "unknown" and version >= self.requested_version
 
-
     def get_version(self, paths=None, default="unknown"):
 
         """Get version number of installed module, 'None', or 'default'
@@ -59,20 +57,18 @@ class Require:
             except ImportError:
                 return None
 
-        v = get_module_constant(self.module,self.attribute,default,paths)
+        v = get_module_constant(self.module, self.attribute, default, paths)
 
         if v is not None and v is not default and self.format is not None:
             return self.format(v)
 
         return v
 
-
-    def is_present(self,paths=None):
+    def is_present(self, paths=None):
         """Return true if dependency is present on 'paths'"""
         return self.get_version(paths) is not None
 
-
-    def is_current(self,paths=None):
+    def is_current(self, paths=None):
         """Return true if dependency is present and up-to-date on 'paths'"""
         version = self.get_version(paths)
         if version is None:
@@ -103,7 +99,7 @@ def _iter_code(code):
             ptr += 3
 
             if op==EXTENDED_ARG:
-                extended_arg = arg * long_type(65536)
+                extended_arg = arg * compat.long_type(65536)
                 continue
 
         else:
@@ -113,14 +109,6 @@ def _iter_code(code):
         yield op,arg
 
 
-
-
-
-
-
-
-
-
 def find_module(module, paths=None):
     """Just like 'imp.find_module()', but with package support"""
 
@@ -140,28 +128,6 @@ def find_module(module, paths=None):
     return info
 
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
 def get_module_constant(module, symbol, default=-1, paths=None):
 
     """Find 'module' by searching 'paths', and extract 'symbol'
@@ -171,7 +137,7 @@ def get_module_constant(module, symbol, default=-1, paths=None):
     constant.  Otherwise, return 'default'."""
 
     try:
-        f, path, (suffix,mode,kind) = find_module(module,paths)
+        f, path, (suffix, mode, kind) = find_module(module, paths)
     except ImportError:
         # Module doesn't exist
         return None
@@ -187,23 +153,17 @@ def get_module_constant(module, symbol, default=-1, paths=None):
         else:
             # Not something we can parse; we'll have to import it.  :(
             if module not in sys.modules:
-                imp.load_module(module,f,path,(suffix,mode,kind))
-            return getattr(sys.modules[module],symbol,None)
+                imp.load_module(module, f, path, (suffix, mode, kind))
+            return getattr(sys.modules[module], symbol, None)
 
     finally:
         if f:
             f.close()
 
-    return extract_constant(code,symbol,default)
+    return extract_constant(code, symbol, default)
 
 
-
-
-
-
-
-
-def extract_constant(code,symbol,default=-1):
+def extract_constant(code, symbol, default=-1):
     """Extract the constant value of 'symbol' from 'code'
 
     If the name 'symbol' is bound to a constant value by the Python code
@@ -236,11 +196,20 @@ def extract_constant(code,symbol,default=-1):
             return const
         else:
             const = default
-            
-if sys.platform.startswith('java') or sys.platform == 'cli':
-    # XXX it'd be better to test assertions about bytecode instead...
-    del extract_constant, get_module_constant
-    __all__.remove('extract_constant')
-    __all__.remove('get_module_constant')
 
 
+def _update_globals():
+    """
+    Patch the globals to remove the objects not available on some platforms.
+
+    XXX it'd be better to test assertions about bytecode instead.
+    """
+
+    if not sys.platform.startswith('java') and sys.platform != 'cli':
+        return
+    incompatible = 'extract_constant', 'get_module_constant'
+    for name in incompatible:
+        del globals()[name]
+        __all__.remove(name)
+
+_update_globals()
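extract_constant() above pulls a constant bound to a top-level name out of a compiled code object without importing the module, which is how Require(..., attribute='__version__') can check an installed version cheaply. A hedged sketch with an invented module body; the expected results assume the interpreter generations this vendored tree targets (the bytecode layout changed in Python 3.6, and later setuptools releases updated this module to match):

from setuptools.depends import extract_constant

source = "__version__ = '1.2.3'\nother = compute()\n"   # hypothetical module source
code = compile(source, '<example>', 'exec')             # compiled, never executed

print(extract_constant(code, '__version__'))   # expected: '1.2.3'
print(extract_constant(code, 'missing'))       # expected: None (name never bound)
print(extract_constant(code, 'other', -1))     # expected: -1 (bound to an expression)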
diff --git a/awx/lib/site-packages/setuptools/dist.py b/awx/lib/site-packages/setuptools/dist.py
index 0801ae74ff..bc29b131b8 100644
--- a/awx/lib/site-packages/setuptools/dist.py
+++ b/awx/lib/site-packages/setuptools/dist.py
@@ -4,17 +4,23 @@ import re
 import os
 import sys
 import warnings
+import numbers
 import distutils.log
 import distutils.core
 import distutils.cmd
+import distutils.dist
 from distutils.core import Distribution as _Distribution
 from distutils.errors import (DistutilsOptionError, DistutilsPlatformError,
     DistutilsSetupError)
 
 from setuptools.depends import Require
-from setuptools.compat import numeric_types, basestring
+from setuptools.compat import basestring, PY2
+from setuptools import windows_support
 import pkg_resources
 
+packaging = pkg_resources.packaging
+
+
 def _get_unpatched(cls):
     """Protect against re-patching the distutils if reloaded
 
@@ -31,6 +37,27 @@ def _get_unpatched(cls):
 
 _Distribution = _get_unpatched(_Distribution)
 
+def _patch_distribution_metadata_write_pkg_info():
+    """
+    Workaround issue #197 - Python 3 prior to 3.2.2 uses an environment-local
+    encoding to save the pkg_info. Monkey-patch its write_pkg_info method to
+    correct this undesirable behavior.
+    """
+    environment_local = (3,) <= sys.version_info[:3] < (3, 2, 2)
+    if not environment_local:
+        return
+
+    # from Python 3.4
+    def write_pkg_info(self, base_dir):
+        """Write the PKG-INFO file into the release tree.
+        """
+        with open(os.path.join(base_dir, 'PKG-INFO'), 'w',
+                  encoding='UTF-8') as pkg_info:
+            self.write_pkg_file(pkg_info)
+
+    distutils.dist.DistributionMetadata.write_pkg_info = write_pkg_info
+_patch_distribution_metadata_write_pkg_info()
+
 sequence = tuple, list
 
 def check_importable(dist, attr, value):
@@ -104,8 +131,7 @@ def check_entry_points(dist, attr, value):
     """Verify that entry_points map is parseable"""
     try:
         pkg_resources.EntryPoint.parse_map(value)
-    except ValueError:
-        e = sys.exc_info()[1]
+    except ValueError as e:
         raise DistutilsSetupError(e)
 
 def check_test_suite(dist, attr, value):
@@ -236,15 +262,36 @@ class Distribution(_Distribution):
             self.dependency_links = attrs.pop('dependency_links', [])
             assert_string_list(self,'dependency_links',self.dependency_links)
         if attrs and 'setup_requires' in attrs:
-            self.fetch_build_eggs(attrs.pop('setup_requires'))
+            self.fetch_build_eggs(attrs['setup_requires'])
         for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
             if not hasattr(self,ep.name):
                 setattr(self,ep.name,None)
         _Distribution.__init__(self,attrs)
-        if isinstance(self.metadata.version, numeric_types):
+        if isinstance(self.metadata.version, numbers.Number):
             # Some people apparently take "version number" too literally :)
             self.metadata.version = str(self.metadata.version)
 
+        if self.metadata.version is not None:
+            try:
+                ver = packaging.version.Version(self.metadata.version)
+                normalized_version = str(ver)
+                if self.metadata.version != normalized_version:
+                    warnings.warn(
+                        "The version specified requires normalization, "
+                        "consider using '%s' instead of '%s'." % (
+                            normalized_version,
+                            self.metadata.version,
+                        )
+                    )
+                    self.metadata.version = normalized_version
+            except (packaging.version.InvalidVersion, TypeError):
+                warnings.warn(
+                    "The version specified (%r) is an invalid version, this "
+                    "may not work as expected with newer versions of "
+                    "setuptools, pip, and PyPI. Please see PEP 440 for more "
+                    "details." % self.metadata.version
+                )
+
     def parse_command_line(self):
         """Process features after parsing command line options"""
         result = _Distribution.parse_command_line(self)
@@ -258,12 +305,13 @@ class Distribution(_Distribution):
 
     def fetch_build_eggs(self, requires):
         """Resolve pre-setup requirements"""
-        from pkg_resources import working_set, parse_requirements
-        for dist in working_set.resolve(
-            parse_requirements(requires), installer=self.fetch_build_egg,
-            replace_conflicting=True
-        ):
-            working_set.add(dist, replace=True)
+        resolved_dists = pkg_resources.working_set.resolve(
+            pkg_resources.parse_requirements(requires),
+            installer=self.fetch_build_egg,
+            replace_conflicting=True,
+        )
+        for dist in resolved_dists:
+            pkg_resources.working_set.add(dist, replace=True)
 
     def finalize_options(self):
         _Distribution.finalize_options(self)
@@ -281,6 +329,21 @@ class Distribution(_Distribution):
         else:
             self.convert_2to3_doctests = []
 
+    def get_egg_cache_dir(self):
+        egg_cache_dir = os.path.join(os.curdir, '.eggs')
+        if not os.path.exists(egg_cache_dir):
+            os.mkdir(egg_cache_dir)
+            windows_support.hide_file(egg_cache_dir)
+            readme_txt_filename = os.path.join(egg_cache_dir, 'README.txt')
+            with open(readme_txt_filename, 'w') as f:
+                f.write('This directory contains eggs that were downloaded '
+                        'by setuptools to build, test, and run plug-ins.\n\n')
+                f.write('This directory caches those eggs to prevent '
+                        'repeated downloads.\n\n')
+                f.write('However, it is safe to delete this directory.\n\n')
+
+        return egg_cache_dir
+
     def fetch_build_egg(self, req):
         """Fetch an egg needed for building"""
 
@@ -304,8 +367,9 @@ class Distribution(_Distribution):
                 if 'find_links' in opts:
                     links = opts['find_links'][1].split() + links
                 opts['find_links'] = ('setup', links)
+            install_dir = self.get_egg_cache_dir()
             cmd = easy_install(
-                dist, args=["x"], install_dir=os.curdir, exclude_scripts=True,
+                dist, args=["x"], install_dir=install_dir, exclude_scripts=True,
                 always_copy=False, build_directory=None, editable=False,
                 upgrade=False, multi_version=True, no_report=True, user=False
             )
@@ -369,7 +433,8 @@ class Distribution(_Distribution):
     def print_commands(self):
         for ep in pkg_resources.iter_entry_points('distutils.commands'):
             if ep.name not in self.cmdclass:
-                cmdclass = ep.load(False) # don't require extras, we're not running
+                # don't require extras as the commands won't be invoked
+                cmdclass = ep.resolve()
                 self.cmdclass[ep.name] = cmdclass
         return _Distribution.print_commands(self)
 
@@ -608,7 +673,7 @@ class Distribution(_Distribution):
         """
         import sys
 
-        if sys.version_info < (3,) or self.help_commands:
+        if PY2 or self.help_commands:
             return _Distribution.handle_display_options(self, option_order)
 
         # Stdout may be StringIO (e.g. in tests)
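The block added to Distribution.__init__ above validates metadata.version against PEP 440 through the vendored packaging that this pkg_resources exposes as pkg_resources.packaging: a parseable but non-normalized version is rewritten to its normalized spelling with a warning, while an unparseable one only warns and is left alone. A hedged sketch of that check in isolation; the function name and sample versions are illustrative:

import warnings
import pkg_resources

packaging = pkg_resources.packaging          # same alias the diff introduces

def normalize_version(version):
    try:
        normalized = str(packaging.version.Version(version))
    except (packaging.version.InvalidVersion, TypeError):
        warnings.warn("%r is not a valid PEP 440 version" % (version,))
        return version
    if normalized != version:
        warnings.warn("consider using %r instead of %r" % (normalized, version))
    return normalized

print(normalize_version('1.0'))            # already normalized, returned unchanged
print(normalize_version('1.0-a1'))         # expected to normalize (to '1.0a1')
print(normalize_version('not-a-version'))  # warns and is returned as-is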
diff --git a/awx/lib/site-packages/setuptools/extension.py b/awx/lib/site-packages/setuptools/extension.py
index d7892d3d9f..8178ed33d7 100644
--- a/awx/lib/site-packages/setuptools/extension.py
+++ b/awx/lib/site-packages/setuptools/extension.py
@@ -1,11 +1,17 @@
 import sys
+import re
+import functools
 import distutils.core
+import distutils.errors
 import distutils.extension
 
-from setuptools.dist import _get_unpatched
+from .dist import _get_unpatched
+from . import msvc9_support
 
 _Extension = _get_unpatched(distutils.core.Extension)
 
+msvc9_support.patch_for_specialized_compiler()
+
 def have_pyrex():
     """
     Return True if Cython or Pyrex can be imported.
@@ -26,16 +32,21 @@ class Extension(_Extension):
 
     def __init__(self, *args, **kw):
         _Extension.__init__(self, *args, **kw)
-        if not have_pyrex():
-            self._convert_pyx_sources_to_c()
+        self._convert_pyx_sources_to_lang()
 
-    def _convert_pyx_sources_to_c(self):
-        "convert .pyx extensions to .c"
-        def pyx_to_c(source):
-            if source.endswith('.pyx'):
-                source = source[:-4] + '.c'
-            return source
-        self.sources = list(map(pyx_to_c, self.sources))
+    def _convert_pyx_sources_to_lang(self):
+        """
+        Replace .pyx source files with sources using the target language
+        extension. This mechanism allows language authors to supply
+        pre-converted sources while still preferring the .pyx sources when
+        Cython/Pyrex is available.
+        """
+        if have_pyrex():
+            # the build has Cython, so allow it to compile the .pyx files
+            return
+        lang = self.language or ''
+        target_ext = '.cpp' if lang.lower() == 'c++' else '.c'
+        sub = functools.partial(re.sub, '.pyx$', target_ext)
+        self.sources = list(map(sub, self.sources))
 
 class Library(Extension):
     """Just like a regular Extension, but built as a library instead"""
diff --git a/awx/lib/site-packages/setuptools/msvc9_support.py b/awx/lib/site-packages/setuptools/msvc9_support.py
new file mode 100644
index 0000000000..a69c7474c8
--- /dev/null
+++ b/awx/lib/site-packages/setuptools/msvc9_support.py
@@ -0,0 +1,63 @@
+try:
+    import distutils.msvc9compiler
+except ImportError:
+    pass
+
+unpatched = dict()
+
+def patch_for_specialized_compiler():
+    """
+    Patch functions in distutils.msvc9compiler to use the standalone compiler
+    build for Python (Windows only). Fall back to original behavior when the
+    standalone compiler is not available.
+    """
+    if 'distutils' not in globals():
+        # The module isn't available to be patched
+        return
+
+    if unpatched:
+        # Already patched
+        return
+
+    unpatched.update(vars(distutils.msvc9compiler))
+
+    distutils.msvc9compiler.find_vcvarsall = find_vcvarsall
+    distutils.msvc9compiler.query_vcvarsall = query_vcvarsall
+
+def find_vcvarsall(version):
+    Reg = distutils.msvc9compiler.Reg
+    VC_BASE = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f'
+    key = VC_BASE % ('', version)
+    try:
+        # Per-user installs register the compiler path here
+        productdir = Reg.get_value(key, "installdir")
+    except KeyError:
+        try:
+            # All-user installs on a 64-bit system register here
+            key = VC_BASE % ('Wow6432Node\\', version)
+            productdir = Reg.get_value(key, "installdir")
+        except KeyError:
+            productdir = None
+
+    if productdir:
+        import os
+        vcvarsall = os.path.join(productdir, "vcvarsall.bat")
+        if os.path.isfile(vcvarsall):
+            return vcvarsall
+
+    return unpatched['find_vcvarsall'](version)
+
+def query_vcvarsall(version, *args, **kwargs):
+    try:
+        return unpatched['query_vcvarsall'](version, *args, **kwargs)
+    except distutils.errors.DistutilsPlatformError as exc:
+        if exc and "vcvarsall.bat" in exc.args[0]:
+            message = 'Microsoft Visual C++ %0.1f is required (%s).' % (version, exc.args[0])
+            if int(version) == 9:
+                # This redirection link is maintained by Microsoft.
+                # Contact vspython@microsoft.com if it needs updating.
+                raise distutils.errors.DistutilsPlatformError(
+                    message + ' Get it from http://aka.ms/vcpython27'
+                )
+            raise distutils.errors.DistutilsPlatformError(message)
+        raise
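
A hedged sketch of using the new module directly, as extension.py does above (the module exists in the setuptools 12.0.5 tree vendored here; later setuptools versions replaced it):

    from setuptools import msvc9_support

    # Registers the registry-aware find_vcvarsall/query_vcvarsall wrappers.
    # Where distutils.msvc9compiler cannot be imported (non-Windows), the
    # call returns without patching anything.
    msvc9_support.patch_for_specialized_compiler()
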
diff --git a/awx/lib/site-packages/setuptools/package_index.py b/awx/lib/site-packages/setuptools/package_index.py
index 167c34e5b4..5ed19130d7 100644
--- a/awx/lib/site-packages/setuptools/package_index.py
+++ b/awx/lib/site-packages/setuptools/package_index.py
@@ -632,16 +632,15 @@ class PackageIndex(Environment):
                     shutil.copy2(filename, dst)
                     filename=dst
 
-            file = open(os.path.join(tmpdir, 'setup.py'), 'w')
-            file.write(
-                "from setuptools import setup\n"
-                "setup(name=%r, version=%r, py_modules=[%r])\n"
-                % (
-                    dists[0].project_name, dists[0].version,
-                    os.path.splitext(basename)[0]
+            with open(os.path.join(tmpdir, 'setup.py'), 'w') as file:
+                file.write(
+                    "from setuptools import setup\n"
+                    "setup(name=%r, version=%r, py_modules=[%r])\n"
+                    % (
+                        dists[0].project_name, dists[0].version,
+                        os.path.splitext(basename)[0]
+                    )
                 )
-            )
-            file.close()
             return filename
 
         elif match:
@@ -660,7 +659,7 @@ class PackageIndex(Environment):
     def _download_to(self, url, filename):
         self.info("Downloading %s", url)
         # Download the file
-        fp, tfp, info = None, None, None
+        fp, info = None, None
         try:
             checker = HashChecker.from_url(url)
             fp = self.open_url(strip_fragment(url))
@@ -677,21 +676,20 @@ class PackageIndex(Environment):
                 sizes = get_all_headers(headers, 'Content-Length')
                 size = max(map(int, sizes))
                 self.reporthook(url, filename, blocknum, bs, size)
-            tfp = open(filename,'wb')
-            while True:
-                block = fp.read(bs)
-                if block:
-                    checker.feed(block)
-                    tfp.write(block)
-                    blocknum += 1
-                    self.reporthook(url, filename, blocknum, bs, size)
-                else:
-                    break
-            self.check_hash(checker, filename, tfp)
+            with open(filename,'wb') as tfp:
+                while True:
+                    block = fp.read(bs)
+                    if block:
+                        checker.feed(block)
+                        tfp.write(block)
+                        blocknum += 1
+                        self.reporthook(url, filename, blocknum, bs, size)
+                    else:
+                        break
+                self.check_hash(checker, filename, tfp)
             return headers
         finally:
             if fp: fp.close()
-            if tfp: tfp.close()
 
     def reporthook(self, url, filename, blocknum, blksize, size):
         pass    # no-op
@@ -701,25 +699,21 @@ class PackageIndex(Environment):
             return local_open(url)
         try:
             return open_with_auth(url, self.opener)
-        except (ValueError, httplib.InvalidURL):
-            v = sys.exc_info()[1]
+        except (ValueError, httplib.InvalidURL) as v:
             msg = ' '.join([str(arg) for arg in v.args])
             if warning:
                 self.warn(warning, msg)
             else:
                 raise DistutilsError('%s %s' % (url, msg))
-        except urllib2.HTTPError:
-            v = sys.exc_info()[1]
+        except urllib2.HTTPError as v:
             return v
-        except urllib2.URLError:
-            v = sys.exc_info()[1]
+        except urllib2.URLError as v:
             if warning:
                 self.warn(warning, v.reason)
             else:
                 raise DistutilsError("Download error for %s: %s"
                                      % (url, v.reason))
-        except httplib.BadStatusLine:
-            v = sys.exc_info()[1]
+        except httplib.BadStatusLine as v:
             if warning:
                 self.warn(warning, v.line)
             else:
@@ -728,8 +722,7 @@ class PackageIndex(Environment):
                     'down, %s' %
                     (url, v.line)
                 )
-        except httplib.HTTPException:
-            v = sys.exc_info()[1]
+        except httplib.HTTPException as v:
             if warning:
                 self.warn(warning, v)
             else:
@@ -1040,9 +1033,8 @@ def local_open(url):
         files = []
         for f in os.listdir(filename):
             if f=='index.html':
-                fp = open(os.path.join(filename,f),'r')
-                body = fp.read()
-                fp.close()
+                with open(os.path.join(filename,f),'r') as fp:
+                    body = fp.read()
                 break
             elif os.path.isdir(os.path.join(filename,f)):
                 f+='/'
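
The reporthook() above is a deliberate no-op hook; a small sketch (not part of the patch) of a subclass that uses it to report download progress, based only on the signature shown:

    from setuptools.package_index import PackageIndex

    class VerboseIndex(PackageIndex):
        def reporthook(self, url, filename, blocknum, blksize, size):
            done = min(blocknum * blksize, size)
            print("%s: %d/%d bytes" % (filename, done, size))
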
diff --git a/awx/lib/site-packages/setuptools/py31compat.py b/awx/lib/site-packages/setuptools/py31compat.py
index dbb324b0e3..c487ac0439 100644
--- a/awx/lib/site-packages/setuptools/py31compat.py
+++ b/awx/lib/site-packages/setuptools/py31compat.py
@@ -1,3 +1,6 @@
+import sys
+import unittest
+
 __all__ = ['get_config_vars', 'get_path']
 
 try:
@@ -9,3 +12,41 @@ except ImportError:
         if name not in ('platlib', 'purelib'):
             raise ValueError("Name must be purelib or platlib")
         return get_python_lib(name=='platlib')
+
+try:
+    # Python >=3.2
+    from tempfile import TemporaryDirectory
+except ImportError:
+    import shutil
+    import tempfile
+    class TemporaryDirectory(object):
+        """"
+        Very simple temporary directory context manager.
+        Will try to delete afterward, but will also ignore OS and similar
+        errors on deletion.
+        """
+        def __init__(self):
+            self.name = None # Handle mkdtemp raising an exception
+            self.name = tempfile.mkdtemp()
+
+        def __enter__(self):
+            return self.name
+
+        def __exit__(self, exctype, excvalue, exctrace):
+            try:
+                shutil.rmtree(self.name, True)
+            except OSError: #removal errors are not the only possible
+                pass
+            self.name = None
+
+
+unittest_main = unittest.main
+
+_PY31 = (3, 1) <= sys.version_info[:2] < (3, 2)
+if _PY31:
+    # on Python 3.1, translate testRunner==None to TextTestRunner
+    # for compatibility with Python 2.6, 2.7, and 3.2+
+    def unittest_main(*args, **kwargs):
+        if 'testRunner' in kwargs and kwargs['testRunner'] is None:
+            kwargs['testRunner'] = unittest.TextTestRunner
+        return unittest.main(*args, **kwargs)
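
A usage sketch (not part of the patch) of the TemporaryDirectory backport added above, assuming the vendored tree is importable; on Python >= 3.2 it is simply the stdlib class:

    import os.path
    from setuptools.py31compat import TemporaryDirectory

    with TemporaryDirectory() as tmp:
        with open(os.path.join(tmp, 'scratch.txt'), 'w') as f:
            f.write('throwaway data\n')
    # the directory is gone here; removal errors were ignored
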
diff --git a/awx/lib/site-packages/setuptools/sandbox.py b/awx/lib/site-packages/setuptools/sandbox.py
index 042c595897..83283ca3dc 100644
--- a/awx/lib/site-packages/setuptools/sandbox.py
+++ b/awx/lib/site-packages/setuptools/sandbox.py
@@ -5,6 +5,8 @@ import operator
 import functools
 import itertools
 import re
+import contextlib
+import pickle
 
 import pkg_resources
 
@@ -20,58 +22,221 @@ _open = open
 from distutils.errors import DistutilsError
 from pkg_resources import working_set
 
-from setuptools.compat import builtins, execfile
+from setuptools import compat
+from setuptools.compat import builtins
 
 __all__ = [
     "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup",
 ]
 
+def _execfile(filename, globals, locals=None):
+    """
+    Python 3 implementation of execfile.
+    """
+    mode = 'rb'
+    # Python 2.6 compile requires LF for newlines, so use deprecated
+    #  Universal newlines support.
+    if sys.version_info < (2, 7):
+        mode += 'U'
+    with open(filename, mode) as stream:
+        script = stream.read()
+    if locals is None:
+        locals = globals
+    code = compile(script, filename, 'exec')
+    exec(code, globals, locals)
+
+
+@contextlib.contextmanager
+def save_argv():
+    saved = sys.argv[:]
+    try:
+        yield saved
+    finally:
+        sys.argv[:] = saved
+
+
+@contextlib.contextmanager
+def save_path():
+    saved = sys.path[:]
+    try:
+        yield saved
+    finally:
+        sys.path[:] = saved
+
+
+@contextlib.contextmanager
+def override_temp(replacement):
+    """
+    Monkey-patch tempfile.tempdir with replacement, ensuring it exists
+    """
+    if not os.path.isdir(replacement):
+        os.makedirs(replacement)
+
+    saved = tempfile.tempdir
+
+    tempfile.tempdir = replacement
+
+    try:
+        yield
+    finally:
+        tempfile.tempdir = saved
+
+
+@contextlib.contextmanager
+def pushd(target):
+    saved = os.getcwd()
+    os.chdir(target)
+    try:
+        yield saved
+    finally:
+        os.chdir(saved)
+
+
+class UnpickleableException(Exception):
+    """
+    An exception representing another Exception that could not be pickled.
+    """
+    @classmethod
+    def dump(cls, type, exc):
+        """
+        Always return a dumped (pickled) type and exc. If exc can't be pickled,
+        wrap it in UnpickleableException first.
+        """
+        try:
+            return pickle.dumps(type), pickle.dumps(exc)
+        except Exception:
+            return cls.dump(cls, cls(repr(exc)))
+
+
+class ExceptionSaver:
+    """
+    A Context Manager that will save an exception, serialized, and restore it
+    later.
+    """
+    def __enter__(self):
+        return self
+
+    def __exit__(self, type, exc, tb):
+        if not exc:
+            return
+
+        # dump the exception
+        self._saved = UnpickleableException.dump(type, exc)
+        self._tb = tb
+
+        # suppress the exception
+        return True
+
+    def resume(self):
+        "restore and re-raise any exception"
+
+        if '_saved' not in vars(self):
+            return
+
+        type, exc = map(pickle.loads, self._saved)
+        compat.reraise(type, exc, self._tb)
+
+
+@contextlib.contextmanager
+def save_modules():
+    """
+    Context in which imported modules are saved.
+
+    Translates exceptions internal to the context into the equivalent exception
+    outside the context.
+    """
+    saved = sys.modules.copy()
+    with ExceptionSaver() as saved_exc:
+        yield saved
+
+    sys.modules.update(saved)
+    # remove any modules imported since
+    del_modules = (
+        mod_name for mod_name in sys.modules
+        if mod_name not in saved
+        # exclude any encodings modules. See #285
+        and not mod_name.startswith('encodings.')
+    )
+    _clear_modules(del_modules)
+
+    saved_exc.resume()
+
+
+def _clear_modules(module_names):
+    for mod_name in list(module_names):
+        del sys.modules[mod_name]
+
+
+@contextlib.contextmanager
+def save_pkg_resources_state():
+    saved = pkg_resources.__getstate__()
+    try:
+        yield saved
+    finally:
+        pkg_resources.__setstate__(saved)
+
+
+@contextlib.contextmanager
+def setup_context(setup_dir):
+    temp_dir = os.path.join(setup_dir, 'temp')
+    with save_pkg_resources_state():
+        with save_modules():
+            hide_setuptools()
+            with save_path():
+                with save_argv():
+                    with override_temp(temp_dir):
+                        with pushd(setup_dir):
+                            # ensure setuptools commands are available
+                            __import__('setuptools')
+                            yield
+
+
+def _needs_hiding(mod_name):
+    """
+    >>> _needs_hiding('setuptools')
+    True
+    >>> _needs_hiding('pkg_resources')
+    True
+    >>> _needs_hiding('setuptools_plugin')
+    False
+    >>> _needs_hiding('setuptools.__init__')
+    True
+    >>> _needs_hiding('distutils')
+    True
+    """
+    pattern = re.compile('(setuptools|pkg_resources|distutils)(\.|$)')
+    return bool(pattern.match(mod_name))
+
+
+def hide_setuptools():
+    """
+    Remove references to setuptools' modules from sys.modules to allow the
+    invocation to import the most appropriate setuptools. This technique is
+    necessary to avoid issues such as #315 where setuptools upgrading itself
+    would fail to find a function declared in the metadata.
+    """
+    modules = filter(_needs_hiding, sys.modules)
+    _clear_modules(modules)
+
+
 def run_setup(setup_script, args):
     """Run a distutils setup script, sandboxed in its directory"""
-    old_dir = os.getcwd()
-    save_argv = sys.argv[:]
-    save_path = sys.path[:]
     setup_dir = os.path.abspath(os.path.dirname(setup_script))
-    temp_dir = os.path.join(setup_dir,'temp')
-    if not os.path.isdir(temp_dir): os.makedirs(temp_dir)
-    save_tmp = tempfile.tempdir
-    save_modules = sys.modules.copy()
-    pr_state = pkg_resources.__getstate__()
-    try:
-        tempfile.tempdir = temp_dir
-        os.chdir(setup_dir)
+    with setup_context(setup_dir):
         try:
             sys.argv[:] = [setup_script]+list(args)
             sys.path.insert(0, setup_dir)
             # reset to include setup dir, w/clean callback list
             working_set.__init__()
             working_set.callbacks.append(lambda dist:dist.activate())
-            DirectorySandbox(setup_dir).run(
-                lambda: execfile(
-                    "setup.py",
-                    {'__file__':setup_script, '__name__':'__main__'}
-                )
-            )
-        except SystemExit:
-            v = sys.exc_info()[1]
+            def runner():
+                ns = dict(__file__=setup_script, __name__='__main__')
+                _execfile(setup_script, ns)
+            DirectorySandbox(setup_dir).run(runner)
+        except SystemExit as v:
             if v.args and v.args[0]:
                 raise
             # Normal exit, just return
-    finally:
-        pkg_resources.__setstate__(pr_state)
-        sys.modules.update(save_modules)
-        # remove any modules imported within the sandbox
-        del_modules = [
-            mod_name for mod_name in sys.modules
-            if mod_name not in save_modules
-            # exclude any encodings modules. See #285
-            and not mod_name.startswith('encodings.')
-        ]
-        list(map(sys.modules.__delitem__, del_modules))
-        os.chdir(old_dir)
-        sys.path[:] = save_path
-        sys.argv[:] = save_argv
-        tempfile.tempdir = save_tmp
 
 
 class AbstractSandbox:
@@ -268,7 +433,7 @@ class DirectorySandbox(AbstractSandbox):
             self._violation(operation, src, dst, *args, **kw)
         return (src,dst)
 
-    def open(self, file, flags, mode=0x1FF, *args, **kw):    # 0777
+    def open(self, file, flags, mode=0o777, *args, **kw):
         """Called for low-level os.open()"""
         if flags & WRITE_FLAGS and not self._ok(file):
             self._violation("os.open", file, flags, mode, *args, **kw)
diff --git a/awx/lib/site-packages/setuptools/script (dev).tmpl b/awx/lib/site-packages/setuptools/script (dev).tmpl
new file mode 100644
index 0000000000..d58b1bb5bf
--- /dev/null
+++ b/awx/lib/site-packages/setuptools/script (dev).tmpl	
@@ -0,0 +1,5 @@
+# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r
+__requires__ = %(spec)r
+__import__('pkg_resources').require(%(spec)r)
+__file__ = %(dev_path)r
+exec(compile(open(__file__).read(), __file__, 'exec'))
diff --git a/awx/lib/site-packages/setuptools/script.tmpl b/awx/lib/site-packages/setuptools/script.tmpl
new file mode 100644
index 0000000000..ff5efbcab3
--- /dev/null
+++ b/awx/lib/site-packages/setuptools/script.tmpl
@@ -0,0 +1,3 @@
+# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r
+__requires__ = %(spec)r
+__import__('pkg_resources').run_script(%(spec)r, %(script_name)r)
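
Both templates are filled in with old-style %-formatting by the script writer; a tiny sketch (the spec and script name are invented for the example):

    template = (
        "# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r\n"
        "__requires__ = %(spec)r\n"
        "__import__('pkg_resources').run_script(%(spec)r, %(script_name)r)\n"
    )
    print(template % {'spec': 'foo==1.0', 'script_name': 'foo-cli'})
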
diff --git a/awx/lib/site-packages/setuptools/site-patch.py b/awx/lib/site-packages/setuptools/site-patch.py
index a7166f1407..c2168019ad 100644
--- a/awx/lib/site-packages/setuptools/site-patch.py
+++ b/awx/lib/site-packages/setuptools/site-patch.py
@@ -1,5 +1,6 @@
 def __boot():
-    import sys, os, os.path
+    import sys
+    import os
     PYTHONPATH = os.environ.get('PYTHONPATH')
     if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH):
         PYTHONPATH = []
@@ -49,13 +50,13 @@ def __boot():
         addsitedir(item)
 
     sys.__egginsert += oldpos           # restore effective old position
-    
-    d,nd = makepath(stdpath[0])
+
+    d, nd = makepath(stdpath[0])
     insert_at = None
     new_path = []
 
     for item in sys.path:
-        p,np = makepath(item)
+        p, np = makepath(item)
 
         if np==nd and insert_at is None:
             # We've hit the first 'system' path entry, so added entries go here
@@ -67,17 +68,9 @@ def __boot():
             # new path after the insert point, back-insert it
             new_path.insert(insert_at, item)
             insert_at += 1
-            
+
     sys.path[:] = new_path
 
-if __name__=='site':    
+if __name__=='site':
     __boot()
     del __boot
-    
-
-
-
-
-
-
-
diff --git a/awx/lib/site-packages/setuptools/ssl_support.py b/awx/lib/site-packages/setuptools/ssl_support.py
index 7b5f429f8f..cc7db067e9 100644
--- a/awx/lib/site-packages/setuptools/ssl_support.py
+++ b/awx/lib/site-packages/setuptools/ssl_support.py
@@ -178,12 +178,19 @@ class VerifyingHTTPSConn(HTTPSConnection):
         if hasattr(self, '_tunnel') and getattr(self, '_tunnel_host', None):
             self.sock = sock
             self._tunnel()
+            # http://bugs.python.org/issue7776: Python>=3.4.1 and >=2.7.7
+            # change self.host to mean the proxy server host when tunneling is
+            # being used. Adapt, since we are interested in the destination
+            # host for the match_hostname() comparison.
+            actual_host = self._tunnel_host
+        else:
+            actual_host = self.host
 
         self.sock = ssl.wrap_socket(
             sock, cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.ca_bundle
         )
         try:
-            match_hostname(self.sock.getpeercert(), self.host)
+            match_hostname(self.sock.getpeercert(), actual_host)
         except CertificateError:
             self.sock.shutdown(socket.SHUT_RDWR)
             self.sock.close()
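
The rule the hunk above implements, as a standalone sketch (not part of the patch): certificate hostname checks must target the tunnelled destination, not the proxy stored in self.host.

    def host_for_cert_check(conn):
        # Prefer the CONNECT destination when tunneling through a proxy.
        tunnel_host = getattr(conn, '_tunnel_host', None)
        return tunnel_host if tunnel_host else conn.host
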
diff --git a/awx/lib/site-packages/setuptools/tests/__init__.py b/awx/lib/site-packages/setuptools/tests/__init__.py
index b5328ce67a..b8a29cbac2 100644
--- a/awx/lib/site-packages/setuptools/tests/__init__.py
+++ b/awx/lib/site-packages/setuptools/tests/__init__.py
@@ -1,8 +1,6 @@
 """Tests for the 'setuptools' package"""
 import sys
 import os
-import unittest
-from setuptools.tests import doctest
 import distutils.core
 import distutils.cmd
 from distutils.errors import DistutilsOptionError, DistutilsPlatformError
@@ -11,24 +9,13 @@ from distutils.core import Extension
 from distutils.version import LooseVersion
 from setuptools.compat import func_code
 
-from setuptools.compat import func_code
+import pytest
+
 import setuptools.dist
 import setuptools.depends as dep
 from setuptools import Feature
 from setuptools.depends import Require
 
-def additional_tests():
-    import doctest, unittest
-    suite = unittest.TestSuite((
-        doctest.DocFileSuite(
-            os.path.join('tests', 'api_tests.txt'),
-            optionflags=doctest.ELLIPSIS, package='pkg_resources',
-            ),
-        ))
-    if sys.platform == 'win32':
-        suite.addTest(doctest.DocFileSuite('win_script_wrapper.txt'))
-    return suite
-
 def makeSetup(**args):
     """Return distribution from 'setup(**args)', without executing commands"""
 
@@ -43,7 +30,12 @@ def makeSetup(**args):
         distutils.core._setup_stop_after = None
 
 
-class DependsTests(unittest.TestCase):
+needs_bytecode = pytest.mark.skipif(
+    not hasattr(dep, 'get_module_constant'),
+    reason="bytecode support not available",
+)
+
+class TestDepends:
 
     def testExtractConst(self):
         if not hasattr(dep, 'extract_constant'):
@@ -56,86 +48,77 @@ class DependsTests(unittest.TestCase):
             y = z
 
         fc = func_code(f1)
+
         # unrecognized name
-        self.assertEqual(dep.extract_constant(fc,'q', -1), None)
+        assert dep.extract_constant(fc,'q', -1) is None
 
         # constant assigned
-        self.assertEqual(dep.extract_constant(fc,'x', -1), "test")
+        dep.extract_constant(fc,'x', -1) == "test"
 
         # expression assigned
-        self.assertEqual(dep.extract_constant(fc,'y', -1), -1)
+        dep.extract_constant(fc,'y', -1) == -1
 
         # recognized name, not assigned
-        self.assertEqual(dep.extract_constant(fc,'z', -1), None)
+        dep.extract_constant(fc,'z', -1) is None
 
     def testFindModule(self):
-        self.assertRaises(ImportError, dep.find_module, 'no-such.-thing')
-        self.assertRaises(ImportError, dep.find_module, 'setuptools.non-existent')
+        with pytest.raises(ImportError):
+            dep.find_module('no-such.-thing')
+        with pytest.raises(ImportError):
+            dep.find_module('setuptools.non-existent')
         f,p,i = dep.find_module('setuptools.tests')
         f.close()
 
+    @needs_bytecode
     def testModuleExtract(self):
-        if not hasattr(dep, 'get_module_constant'):
-            # skip on non-bytecode platforms
-            return
-
         from email import __version__
-        self.assertEqual(
-            dep.get_module_constant('email','__version__'), __version__
-        )
-        self.assertEqual(
-            dep.get_module_constant('sys','version'), sys.version
-        )
-        self.assertEqual(
-            dep.get_module_constant('setuptools.tests','__doc__'),__doc__
-        )
+        assert dep.get_module_constant('email','__version__') == __version__
+        assert dep.get_module_constant('sys','version') == sys.version
+        assert dep.get_module_constant('setuptools.tests','__doc__') == __doc__
 
+    @needs_bytecode
     def testRequire(self):
-        if not hasattr(dep, 'extract_constant'):
-            # skip on non-bytecode platformsh
-            return
-
         req = Require('Email','1.0.3','email')
 
-        self.assertEqual(req.name, 'Email')
-        self.assertEqual(req.module, 'email')
-        self.assertEqual(req.requested_version, '1.0.3')
-        self.assertEqual(req.attribute, '__version__')
-        self.assertEqual(req.full_name(), 'Email-1.0.3')
+        assert req.name == 'Email'
+        assert req.module == 'email'
+        assert req.requested_version == '1.0.3'
+        assert req.attribute == '__version__'
+        assert req.full_name() == 'Email-1.0.3'
 
         from email import __version__
-        self.assertEqual(req.get_version(), __version__)
-        self.assertTrue(req.version_ok('1.0.9'))
-        self.assertTrue(not req.version_ok('0.9.1'))
-        self.assertTrue(not req.version_ok('unknown'))
+        assert req.get_version() == __version__
+        assert req.version_ok('1.0.9')
+        assert not req.version_ok('0.9.1')
+        assert not req.version_ok('unknown')
 
-        self.assertTrue(req.is_present())
-        self.assertTrue(req.is_current())
+        assert req.is_present()
+        assert req.is_current()
 
         req = Require('Email 3000','03000','email',format=LooseVersion)
-        self.assertTrue(req.is_present())
-        self.assertTrue(not req.is_current())
-        self.assertTrue(not req.version_ok('unknown'))
+        assert req.is_present()
+        assert not req.is_current()
+        assert not req.version_ok('unknown')
 
         req = Require('Do-what-I-mean','1.0','d-w-i-m')
-        self.assertTrue(not req.is_present())
-        self.assertTrue(not req.is_current())
+        assert not req.is_present()
+        assert not req.is_current()
 
         req = Require('Tests', None, 'tests', homepage="http://example.com")
-        self.assertEqual(req.format, None)
-        self.assertEqual(req.attribute, None)
-        self.assertEqual(req.requested_version, None)
-        self.assertEqual(req.full_name(), 'Tests')
-        self.assertEqual(req.homepage, 'http://example.com')
+        assert req.format is None
+        assert req.attribute is None
+        assert req.requested_version is None
+        assert req.full_name() == 'Tests'
+        assert req.homepage == 'http://example.com'
 
         paths = [os.path.dirname(p) for p in __path__]
-        self.assertTrue(req.is_present(paths))
-        self.assertTrue(req.is_current(paths))
+        assert req.is_present(paths)
+        assert req.is_current(paths)
 
 
-class DistroTests(unittest.TestCase):
+class TestDistro:
 
-    def setUp(self):
+    def setup_method(self, method):
         self.e1 = Extension('bar.ext',['bar.c'])
         self.e2 = Extension('c.y', ['y.c'])
 
@@ -147,21 +130,21 @@ class DistroTests(unittest.TestCase):
         )
 
     def testDistroType(self):
-        self.assertTrue(isinstance(self.dist,setuptools.dist.Distribution))
+        assert isinstance(self.dist,setuptools.dist.Distribution)
 
     def testExcludePackage(self):
         self.dist.exclude_package('a')
-        self.assertEqual(self.dist.packages, ['b','c'])
+        assert self.dist.packages == ['b','c']
 
         self.dist.exclude_package('b')
-        self.assertEqual(self.dist.packages, ['c'])
-        self.assertEqual(self.dist.py_modules, ['x'])
-        self.assertEqual(self.dist.ext_modules, [self.e1, self.e2])
+        assert self.dist.packages == ['c']
+        assert self.dist.py_modules == ['x']
+        assert self.dist.ext_modules == [self.e1, self.e2]
 
         self.dist.exclude_package('c')
-        self.assertEqual(self.dist.packages, [])
-        self.assertEqual(self.dist.py_modules, ['x'])
-        self.assertEqual(self.dist.ext_modules, [self.e1])
+        assert self.dist.packages == []
+        assert self.dist.py_modules == ['x']
+        assert self.dist.ext_modules == [self.e1]
 
         # test removals from unspecified options
         makeSetup().exclude_package('x')
@@ -169,21 +152,21 @@ class DistroTests(unittest.TestCase):
     def testIncludeExclude(self):
         # remove an extension
         self.dist.exclude(ext_modules=[self.e1])
-        self.assertEqual(self.dist.ext_modules, [self.e2])
+        assert self.dist.ext_modules == [self.e2]
 
         # add it back in
         self.dist.include(ext_modules=[self.e1])
-        self.assertEqual(self.dist.ext_modules, [self.e2, self.e1])
+        assert self.dist.ext_modules == [self.e2, self.e1]
 
         # should not add duplicate
         self.dist.include(ext_modules=[self.e1])
-        self.assertEqual(self.dist.ext_modules, [self.e2, self.e1])
+        assert self.dist.ext_modules == [self.e2, self.e1]
 
     def testExcludePackages(self):
         self.dist.exclude(packages=['c','b','a'])
-        self.assertEqual(self.dist.packages, [])
-        self.assertEqual(self.dist.py_modules, ['x'])
-        self.assertEqual(self.dist.ext_modules, [self.e1])
+        assert self.dist.packages == []
+        assert self.dist.py_modules == ['x']
+        assert self.dist.ext_modules == [self.e1]
 
     def testEmpty(self):
         dist = makeSetup()
@@ -192,49 +175,41 @@ class DistroTests(unittest.TestCase):
         dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
 
     def testContents(self):
-        self.assertTrue(self.dist.has_contents_for('a'))
+        assert self.dist.has_contents_for('a')
         self.dist.exclude_package('a')
-        self.assertTrue(not self.dist.has_contents_for('a'))
+        assert not self.dist.has_contents_for('a')
 
-        self.assertTrue(self.dist.has_contents_for('b'))
+        assert self.dist.has_contents_for('b')
         self.dist.exclude_package('b')
-        self.assertTrue(not self.dist.has_contents_for('b'))
+        assert not self.dist.has_contents_for('b')
 
-        self.assertTrue(self.dist.has_contents_for('c'))
+        assert self.dist.has_contents_for('c')
         self.dist.exclude_package('c')
-        self.assertTrue(not self.dist.has_contents_for('c'))
+        assert not self.dist.has_contents_for('c')
 
     def testInvalidIncludeExclude(self):
-        self.assertRaises(DistutilsSetupError,
-            self.dist.include, nonexistent_option='x'
-        )
-        self.assertRaises(DistutilsSetupError,
-            self.dist.exclude, nonexistent_option='x'
-        )
-        self.assertRaises(DistutilsSetupError,
-            self.dist.include, packages={'x':'y'}
-        )
-        self.assertRaises(DistutilsSetupError,
-            self.dist.exclude, packages={'x':'y'}
-        )
-        self.assertRaises(DistutilsSetupError,
-            self.dist.include, ext_modules={'x':'y'}
-        )
-        self.assertRaises(DistutilsSetupError,
-            self.dist.exclude, ext_modules={'x':'y'}
-        )
+        with pytest.raises(DistutilsSetupError):
+            self.dist.include(nonexistent_option='x')
+        with pytest.raises(DistutilsSetupError):
+            self.dist.exclude(nonexistent_option='x')
+        with pytest.raises(DistutilsSetupError):
+            self.dist.include(packages={'x':'y'})
+        with pytest.raises(DistutilsSetupError):
+            self.dist.exclude(packages={'x':'y'})
+        with pytest.raises(DistutilsSetupError):
+            self.dist.include(ext_modules={'x':'y'})
+        with pytest.raises(DistutilsSetupError):
+            self.dist.exclude(ext_modules={'x':'y'})
 
-        self.assertRaises(DistutilsSetupError,
-            self.dist.include, package_dir=['q']
-        )
-        self.assertRaises(DistutilsSetupError,
-            self.dist.exclude, package_dir=['q']
-        )
+        with pytest.raises(DistutilsSetupError):
+            self.dist.include(package_dir=['q'])
+        with pytest.raises(DistutilsSetupError):
+            self.dist.exclude(package_dir=['q'])
 
 
-class FeatureTests(unittest.TestCase):
+class TestFeatures:
 
-    def setUp(self):
+    def setup_method(self, method):
         self.req = Require('Distutils','1.0.3','distutils')
         self.dist = makeSetup(
             features={
@@ -256,80 +231,75 @@ class FeatureTests(unittest.TestCase):
         )
 
     def testDefaults(self):
-        self.assertTrue(not
-            Feature(
-                "test",standard=True,remove='x',available=False
-            ).include_by_default()
-        )
-        self.assertTrue(
-            Feature("test",standard=True,remove='x').include_by_default()
-        )
+        assert not Feature(
+            "test",standard=True,remove='x',available=False
+        ).include_by_default()
+        assert Feature("test",standard=True,remove='x').include_by_default()
         # Feature must have either kwargs, removes, or require_features
-        self.assertRaises(DistutilsSetupError, Feature, "test")
+        with pytest.raises(DistutilsSetupError):
+            Feature("test")
 
     def testAvailability(self):
-        self.assertRaises(
-            DistutilsPlatformError,
-            self.dist.features['dwim'].include_in, self.dist
-        )
+        with pytest.raises(DistutilsPlatformError):
+            self.dist.features['dwim'].include_in(self.dist)
 
     def testFeatureOptions(self):
         dist = self.dist
-        self.assertTrue(
+        assert (
             ('with-dwim',None,'include DWIM') in dist.feature_options
         )
-        self.assertTrue(
+        assert (
             ('without-dwim',None,'exclude DWIM (default)') in dist.feature_options
         )
-        self.assertTrue(
+        assert (
             ('with-bar',None,'include bar (default)') in dist.feature_options
         )
-        self.assertTrue(
+        assert (
             ('without-bar',None,'exclude bar') in dist.feature_options
         )
-        self.assertEqual(dist.feature_negopt['without-foo'],'with-foo')
-        self.assertEqual(dist.feature_negopt['without-bar'],'with-bar')
-        self.assertEqual(dist.feature_negopt['without-dwim'],'with-dwim')
-        self.assertTrue(not 'without-baz' in dist.feature_negopt)
+        assert dist.feature_negopt['without-foo'] == 'with-foo'
+        assert dist.feature_negopt['without-bar'] == 'with-bar'
+        assert dist.feature_negopt['without-dwim'] == 'with-dwim'
+        assert (not 'without-baz' in dist.feature_negopt)
 
     def testUseFeatures(self):
         dist = self.dist
-        self.assertEqual(dist.with_foo,1)
-        self.assertEqual(dist.with_bar,0)
-        self.assertEqual(dist.with_baz,1)
-        self.assertTrue(not 'bar_et' in dist.py_modules)
-        self.assertTrue(not 'pkg.bar' in dist.packages)
-        self.assertTrue('pkg.baz' in dist.packages)
-        self.assertTrue('scripts/baz_it' in dist.scripts)
-        self.assertTrue(('libfoo','foo/foofoo.c') in dist.libraries)
-        self.assertEqual(dist.ext_modules,[])
-        self.assertEqual(dist.require_features, [self.req])
+        assert dist.with_foo == 1
+        assert dist.with_bar == 0
+        assert dist.with_baz == 1
+        assert (not 'bar_et' in dist.py_modules)
+        assert (not 'pkg.bar' in dist.packages)
+        assert ('pkg.baz' in dist.packages)
+        assert ('scripts/baz_it' in dist.scripts)
+        assert (('libfoo','foo/foofoo.c') in dist.libraries)
+        assert dist.ext_modules == []
+        assert dist.require_features == [self.req]
 
         # If we ask for bar, it should fail because we explicitly disabled
         # it on the command line
-        self.assertRaises(DistutilsOptionError, dist.include_feature, 'bar')
+        with pytest.raises(DistutilsOptionError):
+            dist.include_feature('bar')
 
     def testFeatureWithInvalidRemove(self):
-        self.assertRaises(
-            SystemExit, makeSetup, features = {'x':Feature('x', remove='y')}
-        )
+        with pytest.raises(SystemExit):
+            makeSetup(features={'x':Feature('x', remove='y')})
 
-class TestCommandTests(unittest.TestCase):
+class TestCommandTests:
 
     def testTestIsCommand(self):
         test_cmd = makeSetup().get_command_obj('test')
-        self.assertTrue(isinstance(test_cmd, distutils.cmd.Command))
+        assert (isinstance(test_cmd, distutils.cmd.Command))
 
     def testLongOptSuiteWNoDefault(self):
         ts1 = makeSetup(script_args=['test','--test-suite=foo.tests.suite'])
         ts1 = ts1.get_command_obj('test')
         ts1.ensure_finalized()
-        self.assertEqual(ts1.test_suite, 'foo.tests.suite')
+        assert ts1.test_suite == 'foo.tests.suite'
 
     def testDefaultSuite(self):
         ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test')
         ts2.ensure_finalized()
-        self.assertEqual(ts2.test_suite, 'bar.tests.suite')
+        assert ts2.test_suite == 'bar.tests.suite'
 
     def testDefaultWModuleOnCmdLine(self):
         ts3 = makeSetup(
@@ -337,16 +307,17 @@ class TestCommandTests(unittest.TestCase):
             script_args=['test','-m','foo.tests']
         ).get_command_obj('test')
         ts3.ensure_finalized()
-        self.assertEqual(ts3.test_module, 'foo.tests')
-        self.assertEqual(ts3.test_suite,  'foo.tests.test_suite')
+        assert ts3.test_module == 'foo.tests'
+        assert ts3.test_suite == 'foo.tests.test_suite'
 
     def testConflictingOptions(self):
         ts4 = makeSetup(
             script_args=['test','-m','bar.tests', '-s','foo.tests.suite']
         ).get_command_obj('test')
-        self.assertRaises(DistutilsOptionError, ts4.ensure_finalized)
+        with pytest.raises(DistutilsOptionError):
+            ts4.ensure_finalized()
 
     def testNoSuite(self):
         ts5 = makeSetup().get_command_obj('test')
         ts5.ensure_finalized()
-        self.assertEqual(ts5.test_suite, None)
+        assert ts5.test_suite == None
diff --git a/awx/lib/site-packages/setuptools/tests/contexts.py b/awx/lib/site-packages/setuptools/tests/contexts.py
new file mode 100644
index 0000000000..4a46176557
--- /dev/null
+++ b/awx/lib/site-packages/setuptools/tests/contexts.py
@@ -0,0 +1,93 @@
+import tempfile
+import os
+import shutil
+import sys
+import contextlib
+import site
+
+from ..compat import StringIO
+
+
+@contextlib.contextmanager
+def tempdir(cd=lambda dir:None, **kwargs):
+    temp_dir = tempfile.mkdtemp(**kwargs)
+    orig_dir = os.getcwd()
+    try:
+        cd(temp_dir)
+        yield temp_dir
+    finally:
+        cd(orig_dir)
+        shutil.rmtree(temp_dir)
+
+
+@contextlib.contextmanager
+def environment(**replacements):
+    """
+    In a context, patch the environment with replacements. Pass None values
+    to clear the values.
+    """
+    saved = dict(
+        (key, os.environ[key])
+        for key in replacements
+        if key in os.environ
+    )
+
+    # remove values that are null
+    remove = (key for (key, value) in replacements.items() if value is None)
+    for key in list(remove):
+        os.environ.pop(key, None)
+        replacements.pop(key)
+
+    os.environ.update(replacements)
+
+    try:
+        yield saved
+    finally:
+        for key in replacements:
+            os.environ.pop(key, None)
+        os.environ.update(saved)
+
+
+@contextlib.contextmanager
+def argv(repl):
+    old_argv = sys.argv[:]
+    sys.argv[:] = repl
+    yield
+    sys.argv[:] = old_argv
+
+
+@contextlib.contextmanager
+def quiet():
+    """
+    Redirect stdout/stderr to StringIO objects to prevent console output from
+    distutils commands.
+    """
+
+    old_stdout = sys.stdout
+    old_stderr = sys.stderr
+    new_stdout = sys.stdout = StringIO()
+    new_stderr = sys.stderr = StringIO()
+    try:
+        yield new_stdout, new_stderr
+    finally:
+        new_stdout.seek(0)
+        new_stderr.seek(0)
+        sys.stdout = old_stdout
+        sys.stderr = old_stderr
+
+
+@contextlib.contextmanager
+def save_user_site_setting():
+    saved = site.ENABLE_USER_SITE
+    try:
+        yield saved
+    finally:
+        site.ENABLE_USER_SITE = saved
+
+
+@contextlib.contextmanager
+def suppress_exceptions(*excs):
+    try:
+        yield
+    except excs:
+        pass
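
A hedged sketch of how a test might combine these helpers (assumes the vendored setuptools.tests package is importable from this tree):

    import os

    from setuptools.tests import contexts

    def test_quiet_step_in_scratch_dir():
        with contexts.tempdir(cd=os.chdir):
            with contexts.environment(PYTHONPATH=None):
                with contexts.quiet() as (stdout, stderr):
                    print('chatty build step')   # captured, not shown
        assert 'chatty build step' in stdout.getvalue()
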
diff --git a/awx/lib/site-packages/setuptools/tests/environment.py b/awx/lib/site-packages/setuptools/tests/environment.py
index 476d280ae2..a23c0504e7 100644
--- a/awx/lib/site-packages/setuptools/tests/environment.py
+++ b/awx/lib/site-packages/setuptools/tests/environment.py
@@ -1,119 +1,10 @@
 import os
-import zipfile
 import sys
-import tempfile
-import unittest
-import shutil
-import stat
 import unicodedata
 
 from subprocess import Popen as _Popen, PIPE as _PIPE
 
 
-def _extract(self, member, path=None, pwd=None):
-    """for zipfile py2.5 borrowed from cpython"""
-    if not isinstance(member, zipfile.ZipInfo):
-        member = self.getinfo(member)
-
-    if path is None:
-        path = os.getcwd()
-
-    return _extract_member(self, member, path, pwd)
-
-
-def _extract_from_zip(self, name, dest_path):
-    dest_file = open(dest_path, 'wb')
-    try:
-        dest_file.write(self.read(name))
-    finally:
-        dest_file.close()
-
-
-def _extract_member(self, member, targetpath, pwd):
-    """for zipfile py2.5 borrowed from cpython"""
-    # build the destination pathname, replacing
-    # forward slashes to platform specific separators.
-    # Strip trailing path separator, unless it represents the root.
-    if (targetpath[-1:] in (os.path.sep, os.path.altsep)
-            and len(os.path.splitdrive(targetpath)[1]) > 1):
-        targetpath = targetpath[:-1]
-
-    # don't include leading "/" from file name if present
-    if member.filename[0] == '/':
-        targetpath = os.path.join(targetpath, member.filename[1:])
-    else:
-        targetpath = os.path.join(targetpath, member.filename)
-
-    targetpath = os.path.normpath(targetpath)
-
-    # Create all upper directories if necessary.
-    upperdirs = os.path.dirname(targetpath)
-    if upperdirs and not os.path.exists(upperdirs):
-        os.makedirs(upperdirs)
-
-    if member.filename[-1] == '/':
-        if not os.path.isdir(targetpath):
-            os.mkdir(targetpath)
-        return targetpath
-
-    _extract_from_zip(self, member.filename, targetpath)
-
-    return targetpath
-
-
-def _remove_dir(target):
-
-    #on windows this seems to a problem
-    for dir_path, dirs, files in os.walk(target):
-        os.chmod(dir_path, stat.S_IWRITE)
-        for filename in files:
-            os.chmod(os.path.join(dir_path, filename), stat.S_IWRITE)
-    shutil.rmtree(target)
-
-
-class ZippedEnvironment(unittest.TestCase):
-
-    datafile = None
-    dataname = None
-    old_cwd = None
-
-    def setUp(self):
-        if self.datafile is None or self.dataname is None:
-            return
-
-        if not os.path.isfile(self.datafile):
-            self.old_cwd = None
-            return
-
-        self.old_cwd = os.getcwd()
-
-        self.temp_dir = tempfile.mkdtemp()
-        zip_file, source, target = [None, None, None]
-        try:
-            zip_file = zipfile.ZipFile(self.datafile)
-            for files in zip_file.namelist():
-                _extract(zip_file, files, self.temp_dir)
-        finally:
-            if zip_file:
-                zip_file.close()
-            del zip_file
-
-        os.chdir(os.path.join(self.temp_dir, self.dataname))
-
-    def tearDown(self):
-        #Assume setUp was never completed
-        if self.dataname is None or self.datafile is None:
-            return
-
-        try:
-            if self.old_cwd:
-                os.chdir(self.old_cwd)
-                _remove_dir(self.temp_dir)
-        except OSError:
-            #sigh?
-            pass
-
-
 def _which_dirs(cmd):
     result = set()
     for path in os.environ.get('PATH', '').split(os.pathsep):
@@ -147,10 +38,13 @@ def run_setup_py(cmd, pypath=None, path=None,
 
     cmd = [sys.executable, "setup.py"] + list(cmd)
 
-    #regarding the shell argument, see: http://bugs.python.org/issue8557
+    # http://bugs.python.org/issue8557
+    shell = sys.platform == 'win32'
+
     try:
-        proc = _Popen(cmd, stdout=_PIPE, stderr=_PIPE,
-                      shell=(sys.platform == 'win32'), env=env)
+        proc = _Popen(
+            cmd, stdout=_PIPE, stderr=_PIPE, shell=shell, env=env,
+        )
 
         data = proc.communicate()[data_stream]
     except OSError:
@@ -158,7 +52,8 @@ def run_setup_py(cmd, pypath=None, path=None,
 
     #decode the console string if needed
     if hasattr(data,  "decode"):
-        data = data.decode()  # should use the preffered encoding
+        # use the default encoding
+        data = data.decode()
         data = unicodedata.normalize('NFC', data)
 
     #communciate calls wait()
diff --git a/awx/lib/site-packages/setuptools/tests/fixtures.py b/awx/lib/site-packages/setuptools/tests/fixtures.py
new file mode 100644
index 0000000000..c70c38cb71
--- /dev/null
+++ b/awx/lib/site-packages/setuptools/tests/fixtures.py
@@ -0,0 +1,27 @@
+try:
+    from unittest import mock
+except ImportError:
+    import mock
+import pytest
+
+from . import contexts
+
+
+@pytest.yield_fixture
+def user_override():
+    """
+    Override site.USER_BASE and site.USER_SITE with temporary directories in
+    a context.
+    """
+    with contexts.tempdir() as user_base:
+        with mock.patch('site.USER_BASE', user_base):
+            with contexts.tempdir() as user_site:
+                with mock.patch('site.USER_SITE', user_site):
+                    with contexts.save_user_site_setting():
+                        yield
+
+
+@pytest.yield_fixture
+def tmpdir_cwd(tmpdir):
+    with tmpdir.as_cwd() as orig:
+        yield orig
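
A hedged sketch of a test consuming these fixtures (assumes they are made visible to pytest, e.g. re-exported from a conftest.py):

    import os
    import site

    def test_user_dirs_are_isolated(user_override, tmpdir_cwd):
        # user_override points site.USER_BASE/USER_SITE at throwaway
        # directories; tmpdir_cwd runs the test from a scratch cwd.
        assert os.path.isdir(site.USER_BASE)
        assert os.path.isdir(site.USER_SITE)
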
diff --git a/awx/lib/site-packages/setuptools/tests/indexes/test_links_priority/external.html b/awx/lib/site-packages/setuptools/tests/indexes/test_links_priority/external.html
new file mode 100644
index 0000000000..92e4702f63
--- /dev/null
+++ b/awx/lib/site-packages/setuptools/tests/indexes/test_links_priority/external.html
@@ -0,0 +1,3 @@
+
+bad old link
+
diff --git a/awx/lib/site-packages/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html b/awx/lib/site-packages/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html
new file mode 100644
index 0000000000..fefb028bd3
--- /dev/null
+++ b/awx/lib/site-packages/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html
@@ -0,0 +1,4 @@
+
+foobar-0.1.tar.gz
+external homepage
+
diff --git a/awx/lib/site-packages/setuptools/tests/py26compat.py b/awx/lib/site-packages/setuptools/tests/py26compat.py
index d4fb891af6..c56808816e 100644
--- a/awx/lib/site-packages/setuptools/tests/py26compat.py
+++ b/awx/lib/site-packages/setuptools/tests/py26compat.py
@@ -1,14 +1,14 @@
-import unittest
+import sys
+import tarfile
+import contextlib
 
-try:
-    # provide skipIf for Python 2.4-2.6
-    skipIf = unittest.skipIf
-except AttributeError:
-    def skipIf(condition, reason):
-        def skipper(func):
-            def skip(*args, **kwargs):
-                return
-            if condition:
-                return skip
-            return func
-        return skipper
+def _tarfile_open_ex(*args, **kwargs):
+    """
+    Extend result as a context manager.
+    """
+    return contextlib.closing(tarfile.open(*args, **kwargs))
+
+if sys.version_info[:2] < (2, 7) or (3, 0) <= sys.version_info[:2] < (3, 2):
+    tarfile_open = _tarfile_open_ex
+else:
+    tarfile_open = tarfile.open
diff --git a/awx/lib/site-packages/setuptools/tests/server.py b/awx/lib/site-packages/setuptools/tests/server.py
index ae2381e355..6b21427992 100644
--- a/awx/lib/site-packages/setuptools/tests/server.py
+++ b/awx/lib/site-packages/setuptools/tests/server.py
@@ -1,11 +1,10 @@
 """Basic http server for tests to simulate PyPI or custom indexes
 """
-import sys
+
 import time
 import threading
 from setuptools.compat import BaseHTTPRequestHandler
-from setuptools.compat import (urllib2, URLError, HTTPServer,
-                               SimpleHTTPRequestHandler)
+from setuptools.compat import HTTPServer, SimpleHTTPRequestHandler
 
 class IndexServer(HTTPServer):
     """Basic single-threaded http server simulating a package index
@@ -23,12 +22,8 @@ class IndexServer(HTTPServer):
         HTTPServer.__init__(self, server_address, RequestHandlerClass)
         self._run = True
 
-    def serve(self):
-        while self._run:
-            self.handle_request()
-
     def start(self):
-        self.thread = threading.Thread(target=self.serve)
+        self.thread = threading.Thread(target=self.serve_forever)
         self.thread.start()
 
     def stop(self):
@@ -37,19 +32,7 @@ class IndexServer(HTTPServer):
         # Let the server finish the last request and wait for a new one.
         time.sleep(0.1)
 
-        # self.shutdown is not supported on python < 2.6, so just
-        # set _run to false, and make a request, causing it to
-        # terminate.
-        self._run = False
-        url = 'http://127.0.0.1:%(server_port)s/' % vars(self)
-        try:
-            if sys.version_info >= (2, 6):
-                urllib2.urlopen(url, timeout=5)
-            else:
-                urllib2.urlopen(url)
-        except URLError:
-            # ignore any errors; all that's important is the request
-            pass
+        self.shutdown()
         self.thread.join()
         self.socket.close()
 
@@ -77,6 +60,6 @@ class MockServer(HTTPServer, threading.Thread):
     def run(self):
         self.serve_forever()
 
+    @property
     def url(self):
         return 'http://localhost:%(server_port)s/' % vars(self)
-    url = property(url)
diff --git a/awx/lib/site-packages/setuptools/tests/test_bdist_egg.py b/awx/lib/site-packages/setuptools/tests/test_bdist_egg.py
index 1a12218645..ccfb2ea76b 100644
--- a/awx/lib/site-packages/setuptools/tests/test_bdist_egg.py
+++ b/awx/lib/site-packages/setuptools/tests/test_bdist_egg.py
@@ -1,50 +1,32 @@
 """develop tests
 """
-import sys
-import os, re, shutil, tempfile, unittest
-import tempfile
-import site
+import os
+import re
+
+import pytest
 
-from distutils.errors import DistutilsError
-from setuptools.compat import StringIO
-from setuptools.command.bdist_egg import bdist_egg
-from setuptools.command import easy_install as easy_install_pkg
 from setuptools.dist import Distribution
 
+from . import contexts
+
 SETUP_PY = """\
 from setuptools import setup
 setup(name='foo', py_modules=['hi'])
 """
 
 
-class TestDevelopTest(unittest.TestCase):
-
-    def setUp(self):
-        self.dir = tempfile.mkdtemp()
-        self.old_cwd = os.getcwd()
-        os.chdir(self.dir)
-        f = open('setup.py', 'w')
+@pytest.yield_fixture
+def setup_context(tmpdir):
+    with (tmpdir/'setup.py').open('w') as f:
         f.write(SETUP_PY)
-        f.close()
-        f = open('hi.py', 'w')
+    with (tmpdir/'hi.py').open('w') as f:
         f.write('1\n')
-        f.close()
-        if sys.version >= "2.6":
-            self.old_base = site.USER_BASE
-            site.USER_BASE = tempfile.mkdtemp()
-            self.old_site = site.USER_SITE
-            site.USER_SITE = tempfile.mkdtemp()
+    with tmpdir.as_cwd():
+        yield tmpdir
 
-    def tearDown(self):
-        os.chdir(self.old_cwd)
-        shutil.rmtree(self.dir)
-        if sys.version >= "2.6":
-            shutil.rmtree(site.USER_BASE)
-            shutil.rmtree(site.USER_SITE)
-            site.USER_BASE = self.old_base
-            site.USER_SITE = self.old_site
 
-    def test_bdist_egg(self):
+class Test:
+    def test_bdist_egg(self, setup_context, user_override):
         dist = Distribution(dict(
             script_name='setup.py',
             script_args=['bdist_egg'],
@@ -52,18 +34,10 @@ class TestDevelopTest(unittest.TestCase):
             py_modules=['hi']
         ))
         os.makedirs(os.path.join('build', 'src'))
-        old_stdout = sys.stdout
-        sys.stdout = o = StringIO()
-        try:
+        with contexts.quiet():
             dist.parse_command_line()
             dist.run_commands()
-        finally:
-            sys.stdout = old_stdout
 
         # let's see if we got our egg link at the right place
         [content] = os.listdir('dist')
-        self.assertTrue(re.match('foo-0.0.0-py[23].\d.egg$', content))
-
-def test_suite():
-    return unittest.makeSuite(TestDevelopTest)
-
+        assert re.match('foo-0.0.0-py[23].\d.egg$', content)
diff --git a/awx/lib/site-packages/setuptools/tests/test_build_ext.py b/awx/lib/site-packages/setuptools/tests/test_build_ext.py
index a520ced9d6..0719ba44ae 100644
--- a/awx/lib/site-packages/setuptools/tests/test_build_ext.py
+++ b/awx/lib/site-packages/setuptools/tests/test_build_ext.py
@@ -1,20 +1,18 @@
-"""build_ext tests
-"""
-import os, shutil, tempfile, unittest
-from distutils.command.build_ext import build_ext as distutils_build_ext
+import distutils.command.build_ext as orig
+
 from setuptools.command.build_ext import build_ext
 from setuptools.dist import Distribution
 
-class TestBuildExtTest(unittest.TestCase):
-
+class TestBuildExt:
     def test_get_ext_filename(self):
-        # setuptools needs to give back the same
-        # result than distutils, even if the fullname
-        # is not in ext_map
+        """
+        Setuptools needs to give back the same
+        result as distutils, even if the fullname
+        is not in ext_map.
+        """
         dist = Distribution()
         cmd = build_ext(dist)
         cmd.ext_map['foo/bar'] = ''
         res = cmd.get_ext_filename('foo')
-        wanted = distutils_build_ext.get_ext_filename(cmd, 'foo')
+        wanted = orig.build_ext.get_ext_filename(cmd, 'foo')
         assert res == wanted
-
diff --git a/awx/lib/site-packages/setuptools/tests/test_develop.py b/awx/lib/site-packages/setuptools/tests/test_develop.py
index 7b90161a8a..ed1b194a78 100644
--- a/awx/lib/site-packages/setuptools/tests/test_develop.py
+++ b/awx/lib/site-packages/setuptools/tests/test_develop.py
@@ -1,14 +1,12 @@
 """develop tests
 """
-import sys
-import os, shutil, tempfile, unittest
-import tempfile
+import os
+import shutil
 import site
+import sys
+import tempfile
 
-from distutils.errors import DistutilsError
 from setuptools.command.develop import develop
-from setuptools.command import easy_install as easy_install_pkg
-from setuptools.compat import StringIO
 from setuptools.dist import Distribution
 
 SETUP_PY = """\
@@ -23,10 +21,10 @@ setup(name='foo',
 INIT_PY = """print "foo"
 """
 
-class TestDevelopTest(unittest.TestCase):
+class TestDevelopTest:
 
-    def setUp(self):
-        if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
+    def setup_method(self, method):
+        if hasattr(sys, 'real_prefix'):
             return
 
         # Directory structure
@@ -50,8 +48,8 @@ class TestDevelopTest(unittest.TestCase):
             self.old_site = site.USER_SITE
             site.USER_SITE = tempfile.mkdtemp()
 
-    def tearDown(self):
-        if sys.version < "2.6" or hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix):
+    def teardown_method(self, method):
+        if hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix):
             return
 
         os.chdir(self.old_cwd)
@@ -62,7 +60,7 @@ class TestDevelopTest(unittest.TestCase):
         site.USER_SITE = self.old_site
 
     def test_develop(self):
-        if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
+        if hasattr(sys, 'real_prefix'):
             return
         dist = Distribution(
             dict(name='foo',
@@ -86,7 +84,7 @@ class TestDevelopTest(unittest.TestCase):
         # let's see if we got our egg link at the right place
         content = os.listdir(site.USER_SITE)
         content.sort()
-        self.assertEqual(content, ['easy-install.pth', 'foo.egg-link'])
+        assert content == ['easy-install.pth', 'foo.egg-link']
 
         # Check that we are using the right code.
         egg_link_file = open(os.path.join(site.USER_SITE, 'foo.egg-link'), 'rt')
@@ -100,23 +98,6 @@ class TestDevelopTest(unittest.TestCase):
         finally:
             init_file.close()
         if sys.version < "3":
-            self.assertEqual(init, 'print "foo"')
+            assert init == 'print "foo"'
         else:
-            self.assertEqual(init, 'print("foo")')
-
-    def notest_develop_with_setup_requires(self):
-
-        wanted = ("Could not find suitable distribution for "
-                  "Requirement.parse('I-DONT-EXIST')")
-        old_dir = os.getcwd()
-        os.chdir(self.dir)
-        try:
-            try:
-                dist = Distribution({'setup_requires': ['I_DONT_EXIST']})
-            except DistutilsError:
-                e = sys.exc_info()[1]
-                error = str(e)
-                if error == wanted:
-                    pass
-        finally:
-            os.chdir(old_dir)
+            assert init == 'print("foo")'
diff --git a/awx/lib/site-packages/setuptools/tests/test_dist_info.py b/awx/lib/site-packages/setuptools/tests/test_dist_info.py
index a8adb68c2d..6d0ab58792 100644
--- a/awx/lib/site-packages/setuptools/tests/test_dist_info.py
+++ b/awx/lib/site-packages/setuptools/tests/test_dist_info.py
@@ -3,28 +3,20 @@
 import os
 import shutil
 import tempfile
-import unittest
-import textwrap
-try:
-    import ast
-except:
-    pass
+
+import pytest
 
 import pkg_resources
+from .textwrap import DALS
 
-from setuptools.tests.py26compat import skipIf
-
-def DALS(s):
-    "dedent and left-strip"
-    return textwrap.dedent(s).lstrip()
-
-class TestDistInfo(unittest.TestCase):
+
+class TestDistInfo:
 
     def test_distinfo(self):
-        dists = {}
-        for d in pkg_resources.find_distributions(self.tmpdir):
-            dists[d.project_name] = d
+        dists = dict(
+            (d.project_name, d)
+            for d in pkg_resources.find_distributions(self.tmpdir)
+        )
 
         assert len(dists) == 2, dists
@@ -34,50 +26,45 @@ class TestDistInfo(unittest.TestCase):
         assert versioned.version == '2.718' # from filename
         assert unversioned.version == '0.3' # from METADATA
 
-    @skipIf('ast' not in globals(),
-            "ast is used to test conditional dependencies (Python >= 2.6)")
+    @pytest.mark.importorskip('ast')
     def test_conditional_dependencies(self):
-        requires = [pkg_resources.Requirement.parse('splort==4'),
-                    pkg_resources.Requirement.parse('quux>=1.1')]
+        specs = 'splort==4', 'quux>=1.1'
+        requires = list(map(pkg_resources.Requirement.parse, specs))
 
         for d in pkg_resources.find_distributions(self.tmpdir):
-            self.assertEqual(d.requires(), requires[:1])
-            self.assertEqual(d.requires(extras=('baz',)), requires)
-            self.assertEqual(d.extras, ['baz'])
+            assert d.requires() == requires[:1]
+            assert d.requires(extras=('baz',)) == requires
+            assert d.extras == ['baz']
 
-    def setUp(self):
+    metadata_template = DALS("""
+        Metadata-Version: 1.2
+        Name: {name}
+        {version}
+        Requires-Dist: splort (==4)
+        Provides-Extra: baz
+        Requires-Dist: quux (>=1.1); extra == 'baz'
+        """)
+
+    def setup_method(self, method):
         self.tmpdir = tempfile.mkdtemp()
-        versioned = os.path.join(self.tmpdir,
-                                 'VersionedDistribution-2.718.dist-info')
+        dist_info_name = 'VersionedDistribution-2.718.dist-info'
+        versioned = os.path.join(self.tmpdir, dist_info_name)
         os.mkdir(versioned)
-        metadata_file = open(os.path.join(versioned, 'METADATA'), 'w+')
-        try:
-            metadata_file.write(DALS(
-                """
-                Metadata-Version: 1.2
-                Name: VersionedDistribution
-                Requires-Dist: splort (4)
-                Provides-Extra: baz
-                Requires-Dist: quux (>=1.1); extra == 'baz'
-                """))
-        finally:
-            metadata_file.close()
-        unversioned = os.path.join(self.tmpdir,
-                                   'UnversionedDistribution.dist-info')
+        with open(os.path.join(versioned, 'METADATA'), 'w+') as metadata_file:
+            metadata = self.metadata_template.format(
+                name='VersionedDistribution',
+                version='',
+            ).replace('\n\n', '\n')
+            metadata_file.write(metadata)
+        dist_info_name = 'UnversionedDistribution.dist-info'
+        unversioned = os.path.join(self.tmpdir, dist_info_name)
         os.mkdir(unversioned)
-        metadata_file = open(os.path.join(unversioned, 'METADATA'), 'w+')
-        try:
-            metadata_file.write(DALS(
-                """
-                Metadata-Version: 1.2
-                Name: UnversionedDistribution
-                Version: 0.3
-                Requires-Dist: splort (==4)
-                Provides-Extra: baz
-                Requires-Dist: quux (>=1.1); extra == 'baz'
-                """))
-        finally:
-            metadata_file.close()
+        with open(os.path.join(unversioned, 'METADATA'), 'w+') as metadata_file:
+            metadata = self.metadata_template.format(
+                name='UnversionedDistribution',
+                version='Version: 0.3',
+            )
+            metadata_file.write(metadata)
 
-    def tearDown(self):
+    def teardown_method(self, method):
         shutil.rmtree(self.tmpdir)
diff --git a/awx/lib/site-packages/setuptools/tests/test_easy_install.py b/awx/lib/site-packages/setuptools/tests/test_easy_install.py
index d2cc7a0fe6..4331d30e68 100644
--- a/awx/lib/site-packages/setuptools/tests/test_easy_install.py
+++ b/awx/lib/site-packages/setuptools/tests/test_easy_install.py
@@ -1,29 +1,44 @@
+# -*- coding: utf-8 -*-
+
 """Easy install Tests
 """
+from __future__ import absolute_import
+
 import sys
 import os
 import shutil
 import tempfile
-import unittest
 import site
 import contextlib
-import textwrap
 import tarfile
 import logging
-import distutils.core
+import itertools
+import distutils.errors
 
-from setuptools.compat import StringIO, BytesIO, next, urlparse
-from setuptools.sandbox import run_setup, SandboxViolation
-from setuptools.command.easy_install import (
-    easy_install, fix_jython_executable, get_script_args, nt_quote_arg)
+import pytest
+try:
+    from unittest import mock
+except ImportError:
+    import mock
+
+from setuptools import sandbox
+from setuptools import compat
+from setuptools.compat import StringIO, BytesIO, urlparse
+from setuptools.sandbox import run_setup
+import setuptools.command.easy_install as ei
 from setuptools.command.easy_install import PthDistributions
 from setuptools.command import easy_install as easy_install_pkg
 from setuptools.dist import Distribution
-from pkg_resources import working_set, VersionConflict
+from pkg_resources import working_set
 from pkg_resources import Distribution as PRDistribution
 import setuptools.tests.server
 import pkg_resources
 
+from .py26compat import tarfile_open
+from . import contexts
+from .textwrap import DALS
+
+
 class FakeDist(object):
     def get_entry_map(self, group):
         if group != 'console_scripts':
@@ -33,134 +48,123 @@ class FakeDist(object):
     def as_requirement(self):
         return 'spec'
 
-WANTED = """\
-#!%s
-# EASY-INSTALL-ENTRY-SCRIPT: 'spec','console_scripts','name'
-__requires__ = 'spec'
-import sys
-from pkg_resources import load_entry_point
+SETUP_PY = DALS("""
+    from setuptools import setup
 
-if __name__ == '__main__':
-    sys.exit(
-        load_entry_point('spec', 'console_scripts', 'name')()
-    )
-""" % nt_quote_arg(fix_jython_executable(sys.executable, ""))
+    setup(name='foo')
+    """)
 
-SETUP_PY = """\
-from setuptools import setup
-
-setup(name='foo')
-"""
-
-class TestEasyInstallTest(unittest.TestCase):
+class TestEasyInstallTest:
 
     def test_install_site_py(self):
         dist = Distribution()
-        cmd = easy_install(dist)
+        cmd = ei.easy_install(dist)
         cmd.sitepy_installed = False
         cmd.install_dir = tempfile.mkdtemp()
         try:
             cmd.install_site_py()
             sitepy = os.path.join(cmd.install_dir, 'site.py')
-            self.assertTrue(os.path.exists(sitepy))
+            assert os.path.exists(sitepy)
         finally:
             shutil.rmtree(cmd.install_dir)
 
     def test_get_script_args(self):
+        header = ei.CommandSpec.best().from_environment().as_header()
+        expected = header + DALS("""
+            # EASY-INSTALL-ENTRY-SCRIPT: 'spec','console_scripts','name'
+            __requires__ = 'spec'
+            import sys
+            from pkg_resources import load_entry_point
+
+            if __name__ == '__main__':
+                sys.exit(
+                    load_entry_point('spec', 'console_scripts', 'name')()
+                )
+            """)
         dist = FakeDist()
-        old_platform = sys.platform
-        try:
-            name, script = [i for i in next(get_script_args(dist))][0:2]
-        finally:
-            sys.platform = old_platform
+        args = next(ei.ScriptWriter.get_args(dist))
+        name, script = itertools.islice(args, 2)
 
-        self.assertEqual(script, WANTED)
+        assert script == expected
 
     def test_no_find_links(self):
         # new option '--no-find-links', that blocks find-links added at
         # the project level
         dist = Distribution()
-        cmd = easy_install(dist)
+        cmd = ei.easy_install(dist)
         cmd.check_pth_processing = lambda: True
         cmd.no_find_links = True
         cmd.find_links = ['link1', 'link2']
         cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
         cmd.args = ['ok']
         cmd.ensure_finalized()
-        self.assertEqual(cmd.package_index.scanned_urls, {})
+        assert cmd.package_index.scanned_urls == {}
 
         # let's try without it (default behavior)
-        cmd = easy_install(dist)
+        cmd = ei.easy_install(dist)
         cmd.check_pth_processing = lambda: True
         cmd.find_links = ['link1', 'link2']
         cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
         cmd.args = ['ok']
         cmd.ensure_finalized()
         keys = sorted(cmd.package_index.scanned_urls.keys())
-        self.assertEqual(keys, ['link1', 'link2'])
+        assert keys == ['link1', 'link2']
+
+    def test_write_exception(self):
+        """
+        Test that `cant_write_to_target` is rendered as a DistutilsError.
+ """ + dist = Distribution() + cmd = ei.easy_install(dist) + cmd.install_dir = os.getcwd() + with pytest.raises(distutils.errors.DistutilsError): + cmd.cant_write_to_target() -class TestPTHFileWriter(unittest.TestCase): +class TestPTHFileWriter: def test_add_from_cwd_site_sets_dirty(self): '''a pth file manager should set dirty if a distribution is in site but also the cwd ''' pth = PthDistributions('does-not_exist', [os.getcwd()]) - self.assertTrue(not pth.dirty) + assert not pth.dirty pth.add(PRDistribution(os.getcwd())) - self.assertTrue(pth.dirty) + assert pth.dirty def test_add_from_site_is_ignored(self): - if os.name != 'nt': - location = '/test/location/does-not-have-to-exist' - else: - location = 'c:\\does_not_exist' + location = '/test/location/does-not-have-to-exist' + # PthDistributions expects all locations to be normalized + location = pkg_resources.normalize_path(location) pth = PthDistributions('does-not_exist', [location, ]) - self.assertTrue(not pth.dirty) + assert not pth.dirty pth.add(PRDistribution(location)) - self.assertTrue(not pth.dirty) + assert not pth.dirty -class TestUserInstallTest(unittest.TestCase): - - def setUp(self): - self.dir = tempfile.mkdtemp() - setup = os.path.join(self.dir, 'setup.py') - f = open(setup, 'w') +@pytest.yield_fixture +def setup_context(tmpdir): + with (tmpdir/'setup.py').open('w') as f: f.write(SETUP_PY) - f.close() - self.old_cwd = os.getcwd() - os.chdir(self.dir) + with tmpdir.as_cwd(): + yield tmpdir - self.old_enable_site = site.ENABLE_USER_SITE - self.old_file = easy_install_pkg.__file__ - self.old_base = site.USER_BASE - site.USER_BASE = tempfile.mkdtemp() - self.old_site = site.USER_SITE - site.USER_SITE = tempfile.mkdtemp() - easy_install_pkg.__file__ = site.USER_SITE - def tearDown(self): - os.chdir(self.old_cwd) - shutil.rmtree(self.dir) - - shutil.rmtree(site.USER_BASE) - shutil.rmtree(site.USER_SITE) - site.USER_BASE = self.old_base - site.USER_SITE = self.old_site - site.ENABLE_USER_SITE = self.old_enable_site - easy_install_pkg.__file__ = self.old_file +@pytest.mark.usefixtures("user_override") +@pytest.mark.usefixtures("setup_context") +class TestUserInstallTest: + @mock.patch('setuptools.command.easy_install.__file__', None) def test_user_install_implied(self): + easy_install_pkg.__file__ = site.USER_SITE site.ENABLE_USER_SITE = True # disabled sometimes #XXX: replace with something meaningfull dist = Distribution() dist.script_name = 'setup.py' - cmd = easy_install(dist) + cmd = ei.easy_install(dist) cmd.args = ['py'] cmd.ensure_finalized() - self.assertTrue(cmd.user, 'user should be implied') + assert cmd.user, 'user should be implied' def test_multiproc_atexit(self): try: @@ -178,10 +182,10 @@ class TestUserInstallTest(unittest.TestCase): #XXX: replace with something meaningfull dist = Distribution() dist.script_name = 'setup.py' - cmd = easy_install(dist) + cmd = ei.easy_install(dist) cmd.args = ['py'] cmd.initialize_options() - self.assertFalse(cmd.user, 'NOT user should be implied') + assert not cmd.user, 'NOT user should be implied' def test_local_index(self): # make sure the local index is used @@ -190,11 +194,8 @@ class TestUserInstallTest(unittest.TestCase): new_location = tempfile.mkdtemp() target = tempfile.mkdtemp() egg_file = os.path.join(new_location, 'foo-1.0.egg-info') - f = open(egg_file, 'w') - try: + with open(egg_file, 'w') as f: f.write('Name: foo\n') - finally: - f.close() sys.path.append(target) old_ppath = os.environ.get('PYTHONPATH') @@ -202,14 +203,15 @@ class 
TestUserInstallTest(unittest.TestCase): try: dist = Distribution() dist.script_name = 'setup.py' - cmd = easy_install(dist) + cmd = ei.easy_install(dist) cmd.install_dir = target cmd.args = ['foo'] cmd.ensure_finalized() cmd.local_index.scan([new_location]) res = cmd.easy_install('foo') - self.assertEqual(os.path.realpath(res.location), - os.path.realpath(new_location)) + actual = os.path.normcase(os.path.realpath(res.location)) + expected = os.path.normcase(os.path.realpath(new_location)) + assert actual == expected finally: sys.path.remove(target) for basedir in [new_location, target, ]: @@ -224,6 +226,25 @@ class TestUserInstallTest(unittest.TestCase): else: del os.environ['PYTHONPATH'] + @contextlib.contextmanager + def user_install_setup_context(self, *args, **kwargs): + """ + Wrap sandbox.setup_context to patch easy_install in that context to + appear as user-installed. + """ + with self.orig_context(*args, **kwargs): + import setuptools.command.easy_install as ei + ei.__file__ = site.USER_SITE + yield + + def patched_setup_context(self): + self.orig_context = sandbox.setup_context + + return mock.patch( + 'setuptools.sandbox.setup_context', + self.user_install_setup_context, + ) + def test_setup_requires(self): """Regression test for Distribute issue #318 @@ -232,18 +253,37 @@ class TestUserInstallTest(unittest.TestCase): SandboxViolation. """ - test_pkg = create_setup_requires_package(self.dir) + test_pkg = create_setup_requires_package(os.getcwd()) test_setup_py = os.path.join(test_pkg, 'setup.py') try: - with quiet_context(): - with reset_setup_stop_context(): + with contexts.quiet(): + with self.patched_setup_context(): run_setup(test_setup_py, ['install']) - except SandboxViolation: - self.fail('Installation caused SandboxViolation') + except IndexError: + # Test fails in some cases due to bugs in Python + # See https://bitbucket.org/pypa/setuptools/issue/201 + pass -class TestSetupRequires(unittest.TestCase): +@pytest.yield_fixture +def distutils_package(): + distutils_setup_py = SETUP_PY.replace( + 'from setuptools import setup', + 'from distutils.core import setup', + ) + with contexts.tempdir(cd=os.chdir): + with open('setup.py', 'w') as f: + f.write(distutils_setup_py) + yield + + +class TestDistutilsPackage: + def test_bdist_egg_available_on_distutils_pkg(self, distutils_package): + run_setup('setup.py', ['bdist_egg']) + + +class TestSetupRequires: def test_setup_requires_honors_fetch_params(self): """ @@ -260,25 +300,27 @@ class TestSetupRequires(unittest.TestCase): # Some platforms (Jython) don't find a port to which to bind, # so skip this test for them. return - with quiet_context(): + with contexts.quiet(): # create an sdist that has a build-time dependency. with TestSetupRequires.create_sdist() as dist_file: - with tempdir_context() as temp_install_dir: - with environment_context(PYTHONPATH=temp_install_dir): - ei_params = ['--index-url', p_index.url, + with contexts.tempdir() as temp_install_dir: + with contexts.environment(PYTHONPATH=temp_install_dir): + ei_params = [ + '--index-url', p_index.url, '--allow-hosts', p_index_loc, - '--exclude-scripts', '--install-dir', temp_install_dir, - dist_file] - with reset_setup_stop_context(): - with argv_context(['easy_install']): - # attempt to install the dist. It should fail because - # it doesn't exist. - self.assertRaises(SystemExit, - easy_install_pkg.main, ei_params) + '--exclude-scripts', + '--install-dir', temp_install_dir, + dist_file, + ] + with contexts.argv(['easy_install']): + # attempt to install the dist. 
It should fail because + # it doesn't exist. + with pytest.raises(SystemExit): + easy_install_pkg.main(ei_params) # there should have been two or three requests to the server # (three happens on Python 3.3a) - self.assertTrue(2 <= len(p_index.requests) <= 3) - self.assertEqual(p_index.requests[0].path, '/does-not-exist/') + assert 2 <= len(p_index.requests) <= 3 + assert p_index.requests[0].path == '/does-not-exist/' @staticmethod @contextlib.contextmanager @@ -287,18 +329,17 @@ class TestSetupRequires(unittest.TestCase): Return an sdist with a setup_requires dependency (of something that doesn't exist) """ - with tempdir_context() as dir: + with contexts.tempdir() as dir: dist_path = os.path.join(dir, 'setuptools-test-fetcher-1.0.tar.gz') - make_trivial_sdist( - dist_path, - textwrap.dedent(""" - import setuptools - setuptools.setup( - name="setuptools-test-fetcher", - version="1.0", - setup_requires = ['does-not-exist'], - ) - """).lstrip()) + script = DALS(""" + import setuptools + setuptools.setup( + name="setuptools-test-fetcher", + version="1.0", + setup_requires = ['does-not-exist'], + ) + """) + make_trivial_sdist(dist_path, script) yield dist_path def test_setup_requires_overrides_version_conflict(self): @@ -316,22 +357,17 @@ class TestSetupRequires(unittest.TestCase): working_set.add(fake_dist) try: - with tempdir_context() as temp_dir: + with contexts.tempdir() as temp_dir: test_pkg = create_setup_requires_package(temp_dir) test_setup_py = os.path.join(test_pkg, 'setup.py') - with quiet_context() as (stdout, stderr): - with reset_setup_stop_context(): - try: - # Don't even need to install the package, just - # running the setup.py at all is sufficient - run_setup(test_setup_py, ['--name']) - except VersionConflict: - self.fail('Installing setup.py requirements ' - 'caused a VersionConflict') + with contexts.quiet() as (stdout, stderr): + # Don't even need to install the package, just + # running the setup.py at all is sufficient + run_setup(test_setup_py, ['--name']) lines = stdout.readlines() - self.assertTrue(len(lines) > 0) - self.assertTrue(lines[-1].strip(), 'test_pkg') + assert len(lines) > 0 + assert lines[-1].strip(), 'test_pkg' finally: pkg_resources.__setstate__(pr_state) @@ -352,17 +388,16 @@ def create_setup_requires_package(path): test_setup_py = os.path.join(test_pkg, 'setup.py') os.mkdir(test_pkg) - f = open(test_setup_py, 'w') - f.write(textwrap.dedent("""\ - import setuptools - setuptools.setup(**%r) - """ % test_setup_attrs)) - f.close() + with open(test_setup_py, 'w') as f: + f.write(DALS(""" + import setuptools + setuptools.setup(**%r) + """ % test_setup_attrs)) foobar_path = os.path.join(path, 'foobar-0.1.tar.gz') make_trivial_sdist( foobar_path, - textwrap.dedent("""\ + DALS(""" import setuptools setuptools.setup( name='foobar', @@ -386,71 +421,127 @@ def make_trivial_sdist(dist_path, setup_py): MemFile = StringIO setup_py_bytes = MemFile(setup_py.encode('utf-8')) setup_py_file.size = len(setup_py_bytes.getvalue()) - dist = tarfile.open(dist_path, 'w:gz') - try: + with tarfile_open(dist_path, 'w:gz') as dist: dist.addfile(setup_py_file, fileobj=setup_py_bytes) - finally: - dist.close() -@contextlib.contextmanager -def tempdir_context(cd=lambda dir:None): - temp_dir = tempfile.mkdtemp() - orig_dir = os.getcwd() - try: - cd(temp_dir) - yield temp_dir - finally: - cd(orig_dir) - shutil.rmtree(temp_dir) +class TestScriptHeader: + non_ascii_exe = '/Users/José/bin/python' + exe_with_spaces = r'C:\Program Files\Python33\python.exe' -@contextlib.contextmanager -def 
environment_context(**updates): - old_env = os.environ.copy() - os.environ.update(updates) - try: - yield - finally: - for key in updates: - del os.environ[key] - os.environ.update(old_env) + @pytest.mark.skipif( + sys.platform.startswith('java') and ei.is_sh(sys.executable), + reason="Test cannot run under java when executable is sh" + ) + def test_get_script_header(self): + expected = '#!%s\n' % ei.nt_quote_arg(os.path.normpath(sys.executable)) + actual = ei.ScriptWriter.get_script_header('#!/usr/local/bin/python') + assert actual == expected -@contextlib.contextmanager -def argv_context(repl): - old_argv = sys.argv[:] - sys.argv[:] = repl - yield - sys.argv[:] = old_argv + expected = '#!%s -x\n' % ei.nt_quote_arg(os.path.normpath + (sys.executable)) + actual = ei.ScriptWriter.get_script_header('#!/usr/bin/python -x') + assert actual == expected -@contextlib.contextmanager -def reset_setup_stop_context(): - """ - When the setuptools tests are run using setup.py test, and then - one wants to invoke another setup() command (such as easy_install) - within those tests, it's necessary to reset the global variable - in distutils.core so that the setup() command will run naturally. - """ - setup_stop_after = distutils.core._setup_stop_after - distutils.core._setup_stop_after = None - yield - distutils.core._setup_stop_after = setup_stop_after + actual = ei.ScriptWriter.get_script_header('#!/usr/bin/python', + executable=self.non_ascii_exe) + expected = '#!%s -x\n' % self.non_ascii_exe + assert actual == expected + + actual = ei.ScriptWriter.get_script_header('#!/usr/bin/python', + executable='"'+self.exe_with_spaces+'"') + expected = '#!"%s"\n' % self.exe_with_spaces + assert actual == expected + + @pytest.mark.xfail( + compat.PY3 and os.environ.get("LC_CTYPE") in ("C", "POSIX"), + reason="Test fails in this locale on Python 3" + ) + @mock.patch.dict(sys.modules, java=mock.Mock(lang=mock.Mock(System= + mock.Mock(getProperty=mock.Mock(return_value=""))))) + @mock.patch('sys.platform', 'java1.5.0_13') + def test_get_script_header_jython_workaround(self, tmpdir): + # Create a mock sys.executable that uses a shebang line + header = DALS(""" + #!/usr/bin/python + # -*- coding: utf-8 -*- + """) + exe = tmpdir / 'exe.py' + with exe.open('w') as f: + f.write(header) + exe = str(exe) + + header = ei.ScriptWriter.get_script_header('#!/usr/local/bin/python', + executable=exe) + assert header == '#!/usr/bin/env %s\n' % exe + + expect_out = 'stdout' if sys.version_info < (2,7) else 'stderr' + + with contexts.quiet() as (stdout, stderr): + # When options are included, generate a broken shebang line + # with a warning emitted + candidate = ei.ScriptWriter.get_script_header('#!/usr/bin/python -x', + executable=exe) + assert candidate == '#!%s -x\n' % exe + output = locals()[expect_out] + assert 'Unable to adapt shebang line' in output.getvalue() + + with contexts.quiet() as (stdout, stderr): + candidate = ei.ScriptWriter.get_script_header('#!/usr/bin/python', + executable=self.non_ascii_exe) + assert candidate == '#!%s -x\n' % self.non_ascii_exe + output = locals()[expect_out] + assert 'Unable to adapt shebang line' in output.getvalue() -@contextlib.contextmanager -def quiet_context(): - """ - Redirect stdout/stderr to StringIO objects to prevent console output from - distutils commands. - """ +class TestCommandSpec: + def test_custom_launch_command(self): + """ + Show how a custom CommandSpec could be used to specify a #! executable + which takes parameters. 
+ """ + cmd = ei.CommandSpec(['/usr/bin/env', 'python3']) + assert cmd.as_header() == '#!/usr/bin/env python3\n' - old_stdout = sys.stdout - old_stderr = sys.stderr - new_stdout = sys.stdout = StringIO() - new_stderr = sys.stderr = StringIO() - try: - yield new_stdout, new_stderr - finally: - new_stdout.seek(0) - new_stderr.seek(0) - sys.stdout = old_stdout - sys.stderr = old_stderr + def test_from_param_for_CommandSpec_is_passthrough(self): + """ + from_param should return an instance of a CommandSpec + """ + cmd = ei.CommandSpec(['python']) + cmd_new = ei.CommandSpec.from_param(cmd) + assert cmd is cmd_new + + def test_from_environment_with_spaces_in_executable(self): + with mock.patch('sys.executable', TestScriptHeader.exe_with_spaces): + cmd = ei.CommandSpec.from_environment() + assert len(cmd) == 1 + assert cmd.as_header().startswith('#!"') + + def test_from_simple_string_uses_shlex(self): + """ + In order to support `executable = /usr/bin/env my-python`, make sure + from_param invokes shlex on that input. + """ + cmd = ei.CommandSpec.from_param('/usr/bin/env my-python') + assert len(cmd) == 2 + assert '"' not in cmd.as_header() + + def test_sys_executable(self): + """ + CommandSpec.from_string(sys.executable) should contain just that param. + """ + writer = ei.ScriptWriter.best() + cmd = writer.command_spec_class.from_string(sys.executable) + assert len(cmd) == 1 + assert cmd[0] == sys.executable + + +class TestWindowsScriptWriter: + def test_header(self): + hdr = ei.WindowsScriptWriter.get_script_header('') + assert hdr.startswith('#!') + assert hdr.endswith('\n') + hdr = hdr.lstrip('#!') + hdr = hdr.rstrip('\n') + # header should not start with an escaped quote + assert not hdr.startswith('\\"') diff --git a/awx/lib/site-packages/setuptools/tests/test_egg_info.py b/awx/lib/site-packages/setuptools/tests/test_egg_info.py index 278543662f..a1caf9fd30 100644 --- a/awx/lib/site-packages/setuptools/tests/test_egg_info.py +++ b/awx/lib/site-packages/setuptools/tests/test_egg_info.py @@ -1,173 +1,98 @@ - import os -import sys -import tempfile -import shutil -import unittest +import stat -import pkg_resources -import warnings -from setuptools.command import egg_info -from setuptools import svn_utils -from setuptools.tests import environment, test_svn -from setuptools.tests.py26compat import skipIf +import pytest -ENTRIES_V10 = pkg_resources.resource_string(__name__, 'entries-v10') -"An entries file generated with svn 1.6.17 against the legacy Setuptools repo" +from . import environment +from .textwrap import DALS +from . 
import contexts -class TestEggInfo(unittest.TestCase): +class TestEggInfo: - def setUp(self): - self.test_dir = tempfile.mkdtemp() - os.mkdir(os.path.join(self.test_dir, '.svn')) + setup_script = DALS(""" + from setuptools import setup - self.old_cwd = os.getcwd() - os.chdir(self.test_dir) + setup( + name='foo', + py_modules=['hello'], + entry_points={'console_scripts': ['hi = hello.run']}, + zip_safe=False, + ) + """) - def tearDown(self): - os.chdir(self.old_cwd) - shutil.rmtree(self.test_dir) + def _create_project(self): + with open('setup.py', 'w') as f: + f.write(self.setup_script) - def _write_entries(self, entries): - fn = os.path.join(self.test_dir, '.svn', 'entries') - entries_f = open(fn, 'wb') - entries_f.write(entries) - entries_f.close() - - @skipIf(not test_svn._svn_check, "No SVN to text, in the first place") - def test_version_10_format(self): - """ - """ - #keeping this set for 1.6 is a good check on the get_svn_revision - #to ensure I return using svnversion what would had been returned - version_str = svn_utils.SvnInfo.get_svn_version() - version = [int(x) for x in version_str.split('.')[:2]] - if version != [1, 6]: - if hasattr(self, 'skipTest'): - self.skipTest('') - else: - sys.stderr.write('\n Skipping due to SVN Version\n') - return + with open('hello.py', 'w') as f: + f.write(DALS(""" + def run(): + print('hello') + """)) - self._write_entries(ENTRIES_V10) - rev = egg_info.egg_info.get_svn_revision() - self.assertEqual(rev, '89000') + @pytest.yield_fixture + def env(self): + class Environment(str): pass - def test_version_10_format_legacy_parser(self): - """ - """ - path_variable = None - for env in os.environ: - if env.lower() == 'path': - path_variable = env + with contexts.tempdir(prefix='setuptools-test.') as env_dir: + env = Environment(env_dir) + os.chmod(env_dir, stat.S_IRWXU) + subs = 'home', 'lib', 'scripts', 'data', 'egg-base' + env.paths = dict( + (dirname, os.path.join(env_dir, dirname)) + for dirname in subs + ) + list(map(os.mkdir, env.paths.values())) + config = os.path.join(env.paths['home'], '.pydistutils.cfg') + with open(config, 'w') as f: + f.write(DALS(""" + [egg_info] + egg-base = %(egg-base)s + """ % env.paths + )) + yield env - if path_variable: - old_path = os.environ[path_variable] - os.environ[path_variable] = '' - #catch_warnings not available until py26 - warning_filters = warnings.filters - warnings.filters = warning_filters[:] - try: - warnings.simplefilter("ignore", DeprecationWarning) - self._write_entries(ENTRIES_V10) - rev = egg_info.egg_info.get_svn_revision() - finally: - #restore the warning filters - warnings.filters = warning_filters - #restore the os path - if path_variable: - os.environ[path_variable] = old_path + def test_egg_base_installed_egg_info(self, tmpdir_cwd, env): + self._create_project() - self.assertEqual(rev, '89000') - -DUMMY_SOURCE_TXT = """CHANGES.txt -CONTRIBUTORS.txt -HISTORY.txt -LICENSE -MANIFEST.in -README.txt -setup.py -dummy/__init__.py -dummy/test.txt -dummy.egg-info/PKG-INFO -dummy.egg-info/SOURCES.txt -dummy.egg-info/dependency_links.txt -dummy.egg-info/top_level.txt""" - - -class TestSvnDummy(environment.ZippedEnvironment): - - def setUp(self): - version = svn_utils.SvnInfo.get_svn_version() - if not version: # None or Empty - return None - - self.base_version = tuple([int(x) for x in version.split('.')][:2]) - - if not self.base_version: - raise ValueError('No SVN tools installed') - elif self.base_version < (1, 3): - raise ValueError('Insufficient SVN Version %s' % version) - elif 
self.base_version >= (1, 9): - #trying the latest version - self.base_version = (1, 8) - - self.dataname = "dummy%i%i" % self.base_version - self.datafile = os.path.join('setuptools', 'tests', - 'svn_data', self.dataname + ".zip") - super(TestSvnDummy, self).setUp() - - @skipIf(not test_svn._svn_check, "No SVN to text, in the first place") - def test_sources(self): - code, data = environment.run_setup_py(["sdist"], - pypath=self.old_cwd, - data_stream=1) + environ = os.environ.copy().update( + HOME=env.paths['home'], + ) + cmd = [ + 'install', + '--home', env.paths['home'], + '--install-lib', env.paths['lib'], + '--install-scripts', env.paths['scripts'], + '--install-data', env.paths['data'], + ] + code, data = environment.run_setup_py( + cmd=cmd, + pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]), + data_stream=1, + env=environ, + ) if code: raise AssertionError(data) - sources = os.path.join('dummy.egg-info', 'SOURCES.txt') - infile = open(sources, 'r') - try: - read_contents = infile.read() - finally: - infile.close() - del infile + actual = self._find_egg_info_files(env.paths['lib']) - self.assertEqual(DUMMY_SOURCE_TXT, read_contents) + expected = [ + 'PKG-INFO', + 'SOURCES.txt', + 'dependency_links.txt', + 'entry_points.txt', + 'not-zip-safe', + 'top_level.txt', + ] + assert sorted(actual) == expected - return data - - -class TestSvnDummyLegacy(environment.ZippedEnvironment): - - def setUp(self): - self.base_version = (1, 6) - self.dataname = "dummy%i%i" % self.base_version - self.datafile = os.path.join('setuptools', 'tests', - 'svn_data', self.dataname + ".zip") - super(TestSvnDummyLegacy, self).setUp() - - def test_sources(self): - code, data = environment.run_setup_py(["sdist"], - pypath=self.old_cwd, - path="", - data_stream=1) - if code: - raise AssertionError(data) - - sources = os.path.join('dummy.egg-info', 'SOURCES.txt') - infile = open(sources, 'r') - try: - read_contents = infile.read() - finally: - infile.close() - del infile - - self.assertEqual(DUMMY_SOURCE_TXT, read_contents) - - return data - - -def test_suite(): - return unittest.defaultTestLoader.loadTestsFromName(__name__) + def _find_egg_info_files(self, root): + results = ( + filenames + for dirpath, dirnames, filenames in os.walk(root) + if os.path.basename(dirpath) == 'EGG-INFO' + ) + # expect exactly one result + result, = results + return result diff --git a/awx/lib/site-packages/setuptools/tests/test_find_packages.py b/awx/lib/site-packages/setuptools/tests/test_find_packages.py new file mode 100644 index 0000000000..06a7c02e46 --- /dev/null +++ b/awx/lib/site-packages/setuptools/tests/test_find_packages.py @@ -0,0 +1,170 @@ +"""Tests for setuptools.find_packages().""" +import os +import sys +import shutil +import tempfile +import platform + +import pytest + +import setuptools +from setuptools import find_packages + +find_420_packages = setuptools.PEP420PackageFinder.find + +# modeled after CPython's test.support.can_symlink +def can_symlink(): + TESTFN = tempfile.mktemp() + symlink_path = TESTFN + "can_symlink" + try: + os.symlink(TESTFN, symlink_path) + can = True + except (OSError, NotImplementedError, AttributeError): + can = False + else: + os.remove(symlink_path) + globals().update(can_symlink=lambda: can) + return can + +def has_symlink(): + bad_symlink = ( + # Windows symlink directory detection is broken on Python 3.2 + platform.system() == 'Windows' and sys.version_info[:2] == (3,2) + ) + return can_symlink() and not bad_symlink + +class TestFindPackages: + + def setup_method(self, 
method): + self.dist_dir = tempfile.mkdtemp() + self._make_pkg_structure() + + def teardown_method(self, method): + shutil.rmtree(self.dist_dir) + + def _make_pkg_structure(self): + """Make basic package structure. + + dist/ + docs/ + conf.py + pkg/ + __pycache__/ + nspkg/ + mod.py + subpkg/ + assets/ + asset + __init__.py + setup.py + + """ + self.docs_dir = self._mkdir('docs', self.dist_dir) + self._touch('conf.py', self.docs_dir) + self.pkg_dir = self._mkdir('pkg', self.dist_dir) + self._mkdir('__pycache__', self.pkg_dir) + self.ns_pkg_dir = self._mkdir('nspkg', self.pkg_dir) + self._touch('mod.py', self.ns_pkg_dir) + self.sub_pkg_dir = self._mkdir('subpkg', self.pkg_dir) + self.asset_dir = self._mkdir('assets', self.sub_pkg_dir) + self._touch('asset', self.asset_dir) + self._touch('__init__.py', self.sub_pkg_dir) + self._touch('setup.py', self.dist_dir) + + def _mkdir(self, path, parent_dir=None): + if parent_dir: + path = os.path.join(parent_dir, path) + os.mkdir(path) + return path + + def _touch(self, path, dir_=None): + if dir_: + path = os.path.join(dir_, path) + fp = open(path, 'w') + fp.close() + return path + + def test_regular_package(self): + self._touch('__init__.py', self.pkg_dir) + packages = find_packages(self.dist_dir) + assert packages == ['pkg', 'pkg.subpkg'] + + def test_exclude(self): + self._touch('__init__.py', self.pkg_dir) + packages = find_packages(self.dist_dir, exclude=('pkg.*',)) + assert packages == ['pkg'] + + def test_include_excludes_other(self): + """ + If include is specified, other packages should be excluded. + """ + self._touch('__init__.py', self.pkg_dir) + alt_dir = self._mkdir('other_pkg', self.dist_dir) + self._touch('__init__.py', alt_dir) + packages = find_packages(self.dist_dir, include=['other_pkg']) + assert packages == ['other_pkg'] + + def test_dir_with_dot_is_skipped(self): + shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets')) + data_dir = self._mkdir('some.data', self.pkg_dir) + self._touch('__init__.py', data_dir) + self._touch('file.dat', data_dir) + packages = find_packages(self.dist_dir) + assert 'pkg.some.data' not in packages + + def test_dir_with_packages_in_subdir_is_excluded(self): + """ + Ensure that a package in a non-package such as build/pkg/__init__.py + is excluded. + """ + build_dir = self._mkdir('build', self.dist_dir) + build_pkg_dir = self._mkdir('pkg', build_dir) + self._touch('__init__.py', build_pkg_dir) + packages = find_packages(self.dist_dir) + assert 'build.pkg' not in packages + + @pytest.mark.skipif(not has_symlink(), reason='Symlink support required') + def test_symlinked_packages_are_included(self): + """ + A symbolically-linked directory should be treated like any other + directory when matched as a package. + + Create a link from lpkg -> pkg. 
+ """ + self._touch('__init__.py', self.pkg_dir) + linked_pkg = os.path.join(self.dist_dir, 'lpkg') + os.symlink('pkg', linked_pkg) + assert os.path.isdir(linked_pkg) + packages = find_packages(self.dist_dir) + assert 'lpkg' in packages + + def _assert_packages(self, actual, expected): + assert set(actual) == set(expected) + + def test_pep420_ns_package(self): + packages = find_420_packages( + self.dist_dir, include=['pkg*'], exclude=['pkg.subpkg.assets']) + self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg']) + + def test_pep420_ns_package_no_includes(self): + packages = find_420_packages( + self.dist_dir, exclude=['pkg.subpkg.assets']) + self._assert_packages(packages, ['docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg']) + + def test_pep420_ns_package_no_includes_or_excludes(self): + packages = find_420_packages(self.dist_dir) + expected = [ + 'docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg', 'pkg.subpkg.assets'] + self._assert_packages(packages, expected) + + def test_regular_package_with_nested_pep420_ns_packages(self): + self._touch('__init__.py', self.pkg_dir) + packages = find_420_packages( + self.dist_dir, exclude=['docs', 'pkg.subpkg.assets']) + self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg']) + + def test_pep420_ns_package_no_non_package_dirs(self): + shutil.rmtree(self.docs_dir) + shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets')) + packages = find_420_packages(self.dist_dir) + self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg']) diff --git a/awx/lib/site-packages/setuptools/tests/test_integration.py b/awx/lib/site-packages/setuptools/tests/test_integration.py new file mode 100644 index 0000000000..90bb43136c --- /dev/null +++ b/awx/lib/site-packages/setuptools/tests/test_integration.py @@ -0,0 +1,99 @@ +"""Run some integration tests. + +Try to install a few packages. +""" + +import glob +import os +import sys + +import pytest + +from setuptools.command.easy_install import easy_install +from setuptools.command import easy_install as easy_install_pkg +from setuptools.dist import Distribution +from setuptools.compat import urlopen + + +def setup_module(module): + packages = 'stevedore', 'virtualenvwrapper', 'pbr', 'novaclient' + for pkg in packages: + try: + __import__(pkg) + tmpl = "Integration tests cannot run when {pkg} is installed" + pytest.skip(tmpl.format(**locals())) + except ImportError: + pass + + try: + urlopen('https://pypi.python.org/pypi') + except Exception as exc: + pytest.skip(reason=str(exc)) + + +@pytest.fixture +def install_context(request, tmpdir, monkeypatch): + """Fixture to set up temporary installation directory. + """ + # Save old values so we can restore them. + new_cwd = tmpdir.mkdir('cwd') + user_base = tmpdir.mkdir('user_base') + user_site = tmpdir.mkdir('user_site') + install_dir = tmpdir.mkdir('install_dir') + + def fin(): + # undo the monkeypatch, particularly needed under + # windows because of kept handle on cwd + monkeypatch.undo() + new_cwd.remove() + user_base.remove() + user_site.remove() + install_dir.remove() + request.addfinalizer(fin) + + # Change the environment and site settings to control where the + # files are installed and ensure we do not overwrite anything. 
+ monkeypatch.chdir(new_cwd) + monkeypatch.setattr(easy_install_pkg, '__file__', user_site.strpath) + monkeypatch.setattr('site.USER_BASE', user_base.strpath) + monkeypatch.setattr('site.USER_SITE', user_site.strpath) + monkeypatch.setattr('sys.path', sys.path + [install_dir.strpath]) + monkeypatch.setenv('PYTHONPATH', os.path.pathsep.join(sys.path)) + + # Set up the command for performing the installation. + dist = Distribution() + cmd = easy_install(dist) + cmd.install_dir = install_dir.strpath + return cmd + + +def _install_one(requirement, cmd, pkgname, modulename): + cmd.args = [requirement] + cmd.ensure_finalized() + cmd.run() + target = cmd.install_dir + dest_path = glob.glob(os.path.join(target, pkgname + '*.egg')) + assert dest_path + assert os.path.exists(os.path.join(dest_path[0], pkgname, modulename)) + + +def test_stevedore(install_context): + _install_one('stevedore', install_context, + 'stevedore', 'extension.py') + + +@pytest.mark.xfail +def test_virtualenvwrapper(install_context): + _install_one('virtualenvwrapper', install_context, + 'virtualenvwrapper', 'hook_loader.py') + + +def test_pbr(install_context): + _install_one('pbr', install_context, + 'pbr', 'core.py') + + +@pytest.mark.xfail +def test_python_novaclient(install_context): + _install_one('python-novaclient', install_context, + 'novaclient', 'base.py') diff --git a/awx/lib/site-packages/setuptools/tests/test_markerlib.py b/awx/lib/site-packages/setuptools/tests/test_markerlib.py index dae71cba46..8197b49dc4 100644 --- a/awx/lib/site-packages/setuptools/tests/test_markerlib.py +++ b/awx/lib/site-packages/setuptools/tests/test_markerlib.py @@ -1,48 +1,43 @@ import os -import unittest -from setuptools.tests.py26compat import skipIf -try: - import ast -except ImportError: - pass +import pytest -class TestMarkerlib(unittest.TestCase): - @skipIf('ast' not in globals(), - "ast not available (Python < 2.6?)") +class TestMarkerlib: + + @pytest.mark.importorskip('ast') def test_markers(self): from _markerlib import interpret, default_environment, compile - + os_name = os.name - - self.assertTrue(interpret("")) - - self.assertTrue(interpret("os.name != 'buuuu'")) - self.assertTrue(interpret("os_name != 'buuuu'")) - self.assertTrue(interpret("python_version > '1.0'")) - self.assertTrue(interpret("python_version < '5.0'")) - self.assertTrue(interpret("python_version <= '5.0'")) - self.assertTrue(interpret("python_version >= '1.0'")) - self.assertTrue(interpret("'%s' in os.name" % os_name)) - self.assertTrue(interpret("'%s' in os_name" % os_name)) - self.assertTrue(interpret("'buuuu' not in os.name")) - - self.assertFalse(interpret("os.name == 'buuuu'")) - self.assertFalse(interpret("os_name == 'buuuu'")) - self.assertFalse(interpret("python_version < '1.0'")) - self.assertFalse(interpret("python_version > '5.0'")) - self.assertFalse(interpret("python_version >= '5.0'")) - self.assertFalse(interpret("python_version <= '1.0'")) - self.assertFalse(interpret("'%s' not in os.name" % os_name)) - self.assertFalse(interpret("'buuuu' in os.name and python_version >= '5.0'")) - self.assertFalse(interpret("'buuuu' in os_name and python_version >= '5.0'")) - + + assert interpret("") + + assert interpret("os.name != 'buuuu'") + assert interpret("os_name != 'buuuu'") + assert interpret("python_version > '1.0'") + assert interpret("python_version < '5.0'") + assert interpret("python_version <= '5.0'") + assert interpret("python_version >= '1.0'") + assert interpret("'%s' in os.name" % os_name) + assert interpret("'%s' in os_name" % 
os_name) + assert interpret("'buuuu' not in os.name") + + assert not interpret("os.name == 'buuuu'") + assert not interpret("os_name == 'buuuu'") + assert not interpret("python_version < '1.0'") + assert not interpret("python_version > '5.0'") + assert not interpret("python_version >= '5.0'") + assert not interpret("python_version <= '1.0'") + assert not interpret("'%s' not in os.name" % os_name) + assert not interpret("'buuuu' in os.name and python_version >= '5.0'") + assert not interpret("'buuuu' in os_name and python_version >= '5.0'") + environment = default_environment() environment['extra'] = 'test' - self.assertTrue(interpret("extra == 'test'", environment)) - self.assertFalse(interpret("extra == 'doc'", environment)) - + assert interpret("extra == 'test'", environment) + assert not interpret("extra == 'doc'", environment) + def raises_nameError(): try: interpret("python.version == '42'") @@ -50,9 +45,9 @@ class TestMarkerlib(unittest.TestCase): pass else: raise Exception("Expected NameError") - + raises_nameError() - + def raises_syntaxError(): try: interpret("(x for x in (4,))") @@ -60,9 +55,9 @@ class TestMarkerlib(unittest.TestCase): pass else: raise Exception("Expected SyntaxError") - + raises_syntaxError() - + statement = "python_version == '5'" - self.assertEqual(compile(statement).__doc__, statement) - + assert compile(statement).__doc__ == statement + diff --git a/awx/lib/site-packages/setuptools/tests/test_msvc9compiler.py b/awx/lib/site-packages/setuptools/tests/test_msvc9compiler.py new file mode 100644 index 0000000000..09e0460c56 --- /dev/null +++ b/awx/lib/site-packages/setuptools/tests/test_msvc9compiler.py @@ -0,0 +1,179 @@ +""" +Tests for msvc9compiler. +""" + +import os +import contextlib +import distutils.errors + +import pytest +try: + from unittest import mock +except ImportError: + import mock + +from . import contexts + +# importing only setuptools should apply the patch +__import__('setuptools') + +pytest.importorskip("distutils.msvc9compiler") + + +def mock_reg(hkcu=None, hklm=None): + """ + Return a mock for distutils.msvc9compiler.Reg, patched + to mock out the functions that access the registry. + """ + + _winreg = getattr(distutils.msvc9compiler, '_winreg', None) + winreg = getattr(distutils.msvc9compiler, 'winreg', _winreg) + + hives = { + winreg.HKEY_CURRENT_USER: hkcu or {}, + winreg.HKEY_LOCAL_MACHINE: hklm or {}, + } + + @classmethod + def read_keys(cls, base, key): + """Return list of registry keys.""" + hive = hives.get(base, {}) + return [ + k.rpartition('\\')[2] + for k in hive if k.startswith(key.lower()) + ] + + @classmethod + def read_values(cls, base, key): + """Return dict of registry keys and values.""" + hive = hives.get(base, {}) + return dict( + (k.rpartition('\\')[2], hive[k]) + for k in hive if k.startswith(key.lower()) + ) + + return mock.patch.multiple(distutils.msvc9compiler.Reg, + read_keys=read_keys, read_values=read_values) + + +class TestModulePatch: + """ + Ensure that importing setuptools is sufficient to replace + the standard find_vcvarsall function with a version that + recognizes the "Visual C++ for Python" package. 
+ """ + + key_32 = r'software\microsoft\devdiv\vcforpython\9.0\installdir' + key_64 = r'software\wow6432node\microsoft\devdiv\vcforpython\9.0\installdir' + + def test_patched(self): + "Test the module is actually patched" + mod_name = distutils.msvc9compiler.find_vcvarsall.__module__ + assert mod_name == "setuptools.msvc9_support", "find_vcvarsall unpatched" + + def test_no_registry_entryies_means_nothing_found(self): + """ + No registry entries or environment variable should lead to an error + directing the user to download vcpython27. + """ + find_vcvarsall = distutils.msvc9compiler.find_vcvarsall + query_vcvarsall = distutils.msvc9compiler.query_vcvarsall + + with contexts.environment(VS90COMNTOOLS=None): + with mock_reg(): + assert find_vcvarsall(9.0) is None + + expected = distutils.errors.DistutilsPlatformError + with pytest.raises(expected) as exc: + query_vcvarsall(9.0) + assert 'aka.ms/vcpython27' in str(exc) + + @pytest.yield_fixture + def user_preferred_setting(self): + """ + Set up environment with different install dirs for user vs. system + and yield the user_install_dir for the expected result. + """ + with self.mock_install_dir() as user_install_dir: + with self.mock_install_dir() as system_install_dir: + reg = mock_reg( + hkcu={ + self.key_32: user_install_dir, + }, + hklm={ + self.key_32: system_install_dir, + self.key_64: system_install_dir, + }, + ) + with reg: + yield user_install_dir + + def test_prefer_current_user(self, user_preferred_setting): + """ + Ensure user's settings are preferred. + """ + result = distutils.msvc9compiler.find_vcvarsall(9.0) + expected = os.path.join(user_preferred_setting, 'vcvarsall.bat') + assert expected == result + + @pytest.yield_fixture + def local_machine_setting(self): + """ + Set up environment with only the system environment configured. + """ + with self.mock_install_dir() as system_install_dir: + reg = mock_reg( + hklm={ + self.key_32: system_install_dir, + }, + ) + with reg: + yield system_install_dir + + def test_local_machine_recognized(self, local_machine_setting): + """ + Ensure machine setting is honored if user settings are not present. + """ + result = distutils.msvc9compiler.find_vcvarsall(9.0) + expected = os.path.join(local_machine_setting, 'vcvarsall.bat') + assert expected == result + + @pytest.yield_fixture + def x64_preferred_setting(self): + """ + Set up environment with 64-bit and 32-bit system settings configured + and yield the canonical location. + """ + with self.mock_install_dir() as x32_dir: + with self.mock_install_dir() as x64_dir: + reg = mock_reg( + hklm={ + # This *should* only exist on 32-bit machines + self.key_32: x32_dir, + # This *should* only exist on 64-bit machines + self.key_64: x64_dir, + }, + ) + with reg: + yield x32_dir + + def test_ensure_64_bit_preferred(self, x64_preferred_setting): + """ + Ensure 64-bit system key is preferred. + """ + result = distutils.msvc9compiler.find_vcvarsall(9.0) + expected = os.path.join(x64_preferred_setting, 'vcvarsall.bat') + assert expected == result + + @staticmethod + @contextlib.contextmanager + def mock_install_dir(): + """ + Make a mock install dir in a unique location so that tests can + distinguish which dir was detected in a given scenario. 
+ """ + with contexts.tempdir() as result: + vcvarsall = os.path.join(result, 'vcvarsall.bat') + with open(vcvarsall, 'w'): + pass + yield result diff --git a/awx/lib/site-packages/setuptools/tests/test_packageindex.py b/awx/lib/site-packages/setuptools/tests/test_packageindex.py index 664566a36c..dcd90d6fe7 100644 --- a/awx/lib/site-packages/setuptools/tests/test_packageindex.py +++ b/awx/lib/site-packages/setuptools/tests/test_packageindex.py @@ -1,26 +1,24 @@ -"""Package Index Tests -""" import sys -import os -import unittest -import pkg_resources -from setuptools.compat import urllib2, httplib, HTTPError, unicode, pathname2url import distutils.errors + +from setuptools.compat import httplib, HTTPError, unicode, pathname2url + +import pkg_resources import setuptools.package_index from setuptools.tests.server import IndexServer -class TestPackageIndex(unittest.TestCase): + +class TestPackageIndex: def test_bad_url_bad_port(self): index = setuptools.package_index.PackageIndex() url = 'http://127.0.0.1:0/nonesuch/test_package_index' try: v = index.open_url(url) - except Exception: - v = sys.exc_info()[1] - self.assertTrue(url in str(v)) + except Exception as v: + assert url in str(v) else: - self.assertTrue(isinstance(v, HTTPError)) + assert isinstance(v, HTTPError) def test_bad_url_typo(self): # issue 16 @@ -33,11 +31,10 @@ class TestPackageIndex(unittest.TestCase): url = 'url:%20https://svn.plone.org/svn/collective/inquant.contentmirror.plone/trunk' try: v = index.open_url(url) - except Exception: - v = sys.exc_info()[1] - self.assertTrue(url in str(v)) + except Exception as v: + assert url in str(v) else: - self.assertTrue(isinstance(v, HTTPError)) + assert isinstance(v, HTTPError) def test_bad_url_bad_status_line(self): index = setuptools.package_index.PackageIndex( @@ -51,9 +48,8 @@ class TestPackageIndex(unittest.TestCase): url = 'http://example.com' try: v = index.open_url(url) - except Exception: - v = sys.exc_info()[1] - self.assertTrue('line' in str(v)) + except Exception as v: + assert 'line' in str(v) else: raise AssertionError('Should have raise here!') @@ -69,8 +65,7 @@ class TestPackageIndex(unittest.TestCase): url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk' try: index.open_url(url) - except distutils.errors.DistutilsError: - error = sys.exc_info()[1] + except distutils.errors.DistutilsError as error: msg = unicode(error) assert 'nonnumeric port' in msg or 'getaddrinfo failed' in msg or 'Name or service not known' in msg return @@ -94,7 +89,7 @@ class TestPackageIndex(unittest.TestCase): hosts=('www.example.com',) ) url = 'file:///tmp/test_package_index' - self.assertTrue(index.url_ok(url, True)) + assert index.url_ok(url, True) def test_links_priority(self): """ @@ -127,21 +122,30 @@ class TestPackageIndex(unittest.TestCase): server.stop() # the distribution has been found - self.assertTrue('foobar' in pi) + assert 'foobar' in pi # we have only one link, because links are compared without md5 - self.assertTrue(len(pi['foobar'])==1) + assert len(pi['foobar'])==1 # the link should be from the index - self.assertTrue('correct_md5' in pi['foobar'][0].location) + assert 'correct_md5' in pi['foobar'][0].location def test_parse_bdist_wininst(self): - self.assertEqual(setuptools.package_index.parse_bdist_wininst( - 'reportlab-2.5.win32-py2.4.exe'), ('reportlab-2.5', '2.4', 'win32')) - self.assertEqual(setuptools.package_index.parse_bdist_wininst( - 'reportlab-2.5.win32.exe'), ('reportlab-2.5', None, 'win32')) - 
self.assertEqual(setuptools.package_index.parse_bdist_wininst( - 'reportlab-2.5.win-amd64-py2.7.exe'), ('reportlab-2.5', '2.7', 'win-amd64')) - self.assertEqual(setuptools.package_index.parse_bdist_wininst( - 'reportlab-2.5.win-amd64.exe'), ('reportlab-2.5', None, 'win-amd64')) + parse = setuptools.package_index.parse_bdist_wininst + + actual = parse('reportlab-2.5.win32-py2.4.exe') + expected = 'reportlab-2.5', '2.4', 'win32' + assert actual == expected + + actual = parse('reportlab-2.5.win32.exe') + expected = 'reportlab-2.5', None, 'win32' + assert actual == expected + + actual = parse('reportlab-2.5.win-amd64-py2.7.exe') + expected = 'reportlab-2.5', '2.7', 'win-amd64' + assert actual == expected + + actual = parse('reportlab-2.5.win-amd64.exe') + expected = 'reportlab-2.5', None, 'win-amd64' + assert actual == expected def test__vcs_split_rev_from_url(self): """ @@ -149,55 +153,51 @@ class TestPackageIndex(unittest.TestCase): """ vsrfu = setuptools.package_index.PackageIndex._vcs_split_rev_from_url url, rev = vsrfu('https://example.com/bar@2995') - self.assertEqual(url, 'https://example.com/bar') - self.assertEqual(rev, '2995') + assert url == 'https://example.com/bar' + assert rev == '2995' - def test_local_index(self): + def test_local_index(self, tmpdir): """ local_open should be able to read an index from the file system. """ - f = open('index.html', 'w') - f.write('
<div>content</div>
') - f.close() - try: - url = 'file:' + pathname2url(os.getcwd()) + '/' - res = setuptools.package_index.local_open(url) - finally: - os.remove('index.html') + index_file = tmpdir / 'index.html' + with index_file.open('w') as f: + f.write('
<div>content</div>
') + url = 'file:' + pathname2url(str(tmpdir)) + '/' + res = setuptools.package_index.local_open(url) assert 'content' in res.read() -class TestContentCheckers(unittest.TestCase): +class TestContentCheckers: def test_md5(self): checker = setuptools.package_index.HashChecker.from_url( 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') checker.feed('You should probably not be using MD5'.encode('ascii')) - self.assertEqual(checker.hash.hexdigest(), - 'f12895fdffbd45007040d2e44df98478') - self.assertTrue(checker.is_valid()) + assert checker.hash.hexdigest() == 'f12895fdffbd45007040d2e44df98478' + assert checker.is_valid() def test_other_fragment(self): "Content checks should succeed silently if no hash is present" checker = setuptools.package_index.HashChecker.from_url( 'http://foo/bar#something%20completely%20different') checker.feed('anything'.encode('ascii')) - self.assertTrue(checker.is_valid()) + assert checker.is_valid() def test_blank_md5(self): "Content checks should succeed if a hash is empty" checker = setuptools.package_index.HashChecker.from_url( 'http://foo/bar#md5=') checker.feed('anything'.encode('ascii')) - self.assertTrue(checker.is_valid()) + assert checker.is_valid() def test_get_hash_name_md5(self): checker = setuptools.package_index.HashChecker.from_url( 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') - self.assertEqual(checker.hash_name, 'md5') + assert checker.hash_name == 'md5' def test_report(self): checker = setuptools.package_index.HashChecker.from_url( 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') rep = checker.report(lambda x: x, 'My message about %s') - self.assertEqual(rep, 'My message about md5') + assert rep == 'My message about md5' diff --git a/awx/lib/site-packages/setuptools/tests/test_sandbox.py b/awx/lib/site-packages/setuptools/tests/test_sandbox.py index 3dad137683..6e1e9e1cd2 100644 --- a/awx/lib/site-packages/setuptools/tests/test_sandbox.py +++ b/awx/lib/site-packages/setuptools/tests/test_sandbox.py @@ -1,69 +1,43 @@ """develop tests """ -import sys import os -import shutil -import unittest -import tempfile import types +import pytest + import pkg_resources import setuptools.sandbox -from setuptools.sandbox import DirectorySandbox, SandboxViolation +from setuptools.sandbox import DirectorySandbox -def has_win32com(): - """ - Run this to determine if the local machine has win32com, and if it - does, include additional tests. - """ - if not sys.platform.startswith('win32'): - return False - try: - mod = __import__('win32com') - except ImportError: - return False - return True -class TestSandbox(unittest.TestCase): +class TestSandbox: - def setUp(self): - self.dir = tempfile.mkdtemp() - - def tearDown(self): - shutil.rmtree(self.dir) - - def test_devnull(self): - if sys.version < '2.4': - return - sandbox = DirectorySandbox(self.dir) + def test_devnull(self, tmpdir): + sandbox = DirectorySandbox(str(tmpdir)) sandbox.run(self._file_writer(os.devnull)) + @staticmethod def _file_writer(path): def do_write(): - f = open(path, 'w') - f.write('xxx') - f.close() + with open(path, 'w') as f: + f.write('xxx') return do_write - _file_writer = staticmethod(_file_writer) - - if has_win32com(): - def test_win32com(self): - """ - win32com should not be prevented from caching COM interfaces - in gen_py. 
- """ - import win32com - gen_py = win32com.__gen_path__ - target = os.path.join(gen_py, 'test_write') - sandbox = DirectorySandbox(self.dir) - try: - try: - sandbox.run(self._file_writer(target)) - except SandboxViolation: - self.fail("Could not create gen_py file due to SandboxViolation") - finally: - if os.path.exists(target): os.remove(target) + def test_win32com(self, tmpdir): + """ + win32com should not be prevented from caching COM interfaces + in gen_py. + """ + win32com = pytest.importorskip('win32com') + gen_py = win32com.__gen_path__ + target = os.path.join(gen_py, 'test_write') + sandbox = DirectorySandbox(str(tmpdir)) + try: + # attempt to create gen_py file + sandbox.run(self._file_writer(target)) + finally: + if os.path.exists(target): + os.remove(target) def test_setup_py_with_BOM(self): """ @@ -72,8 +46,57 @@ class TestSandbox(unittest.TestCase): target = pkg_resources.resource_filename(__name__, 'script-with-bom.py') namespace = types.ModuleType('namespace') - setuptools.sandbox.execfile(target, vars(namespace)) + setuptools.sandbox._execfile(target, vars(namespace)) assert namespace.result == 'passed' -if __name__ == '__main__': - unittest.main() + def test_setup_py_with_CRLF(self, tmpdir): + setup_py = tmpdir / 'setup.py' + with setup_py.open('wb') as stream: + stream.write(b'"degenerate script"\r\n') + setuptools.sandbox._execfile(str(setup_py), globals()) + + +class TestExceptionSaver: + def test_exception_trapped(self): + with setuptools.sandbox.ExceptionSaver(): + raise ValueError("details") + + def test_exception_resumed(self): + with setuptools.sandbox.ExceptionSaver() as saved_exc: + raise ValueError("details") + + with pytest.raises(ValueError) as caught: + saved_exc.resume() + + assert isinstance(caught.value, ValueError) + assert str(caught.value) == 'details' + + def test_exception_reconstructed(self): + orig_exc = ValueError("details") + + with setuptools.sandbox.ExceptionSaver() as saved_exc: + raise orig_exc + + with pytest.raises(ValueError) as caught: + saved_exc.resume() + + assert isinstance(caught.value, ValueError) + assert caught.value is not orig_exc + + def test_no_exception_passes_quietly(self): + with setuptools.sandbox.ExceptionSaver() as saved_exc: + pass + + saved_exc.resume() + + def test_unpickleable_exception(self): + class CantPickleThis(Exception): + "This Exception is unpickleable because it's not in globals" + + with setuptools.sandbox.ExceptionSaver() as saved_exc: + raise CantPickleThis('detail') + + with pytest.raises(setuptools.sandbox.UnpickleableException) as caught: + saved_exc.resume() + + assert str(caught.value) == "CantPickleThis('detail',)" diff --git a/awx/lib/site-packages/setuptools/tests/test_sdist.py b/awx/lib/site-packages/setuptools/tests/test_sdist.py index 71d10757bc..9013b505db 100644 --- a/awx/lib/site-packages/setuptools/tests/test_sdist.py +++ b/awx/lib/site-packages/setuptools/tests/test_sdist.py @@ -6,18 +6,16 @@ import os import shutil import sys import tempfile -import unittest import unicodedata -import re -from setuptools.tests import environment, test_svn -from setuptools.tests.py26compat import skipIf +import contextlib -from setuptools.compat import StringIO, unicode -from setuptools.tests.py26compat import skipIf -from setuptools.command.sdist import sdist, walk_revctrl +import pytest + +import pkg_resources +from setuptools.compat import StringIO, unicode, PY3, PY2 +from setuptools.command.sdist import sdist from setuptools.command.egg_info import manifest_maker from setuptools.dist import 
Distribution -from setuptools import svn_utils SETUP_ATTRS = { 'name': 'sdist_test', @@ -34,32 +32,33 @@ setup(**%r) """ % SETUP_ATTRS -if sys.version_info >= (3,): +if PY3: LATIN1_FILENAME = 'smörbröd.py'.encode('latin-1') else: LATIN1_FILENAME = 'sm\xf6rbr\xf6d.py' # Cannot use context manager because of Python 2.4 +@contextlib.contextmanager def quiet(): - global old_stdout, old_stderr old_stdout, old_stderr = sys.stdout, sys.stderr sys.stdout, sys.stderr = StringIO(), StringIO() - -def unquiet(): - sys.stdout, sys.stderr = old_stdout, old_stderr + try: + yield + finally: + sys.stdout, sys.stderr = old_stdout, old_stderr # Fake byte literals for Python <= 2.5 def b(s, encoding='utf-8'): - if sys.version_info >= (3,): + if PY3: return s.encode(encoding) return s # Convert to POSIX path def posix(path): - if sys.version_info >= (3,) and not isinstance(path, str): + if PY3 and not isinstance(path, str): return path.replace(os.sep.encode('ascii'), b('/')) else: return path.replace(os.sep, '/') @@ -74,17 +73,18 @@ def decompose(path): path = unicodedata.normalize('NFD', path) path = path.encode('utf-8') except UnicodeError: - pass # Not UTF-8 + pass # Not UTF-8 return path -class TestSdistTest(unittest.TestCase): +class TestSdistTest: - def setUp(self): + def setup_method(self, method): self.temp_dir = tempfile.mkdtemp() f = open(os.path.join(self.temp_dir, 'setup.py'), 'w') f.write(SETUP_PY) f.close() + # Set up the rest of the test package test_pkg = os.path.join(self.temp_dir, 'sdist_test') os.mkdir(test_pkg) @@ -97,7 +97,7 @@ class TestSdistTest(unittest.TestCase): self.old_cwd = os.getcwd() os.chdir(self.temp_dir) - def tearDown(self): + def teardown_method(self, method): os.chdir(self.old_cwd) shutil.rmtree(self.temp_dir) @@ -112,17 +112,40 @@ class TestSdistTest(unittest.TestCase): cmd = sdist(dist) cmd.ensure_finalized() - # squelch output - quiet() - try: + with quiet(): cmd.run() - finally: - unquiet() manifest = cmd.filelist.files - self.assertTrue(os.path.join('sdist_test', 'a.txt') in manifest) - self.assertTrue(os.path.join('sdist_test', 'b.txt') in manifest) - self.assertTrue(os.path.join('sdist_test', 'c.rst') not in manifest) + assert os.path.join('sdist_test', 'a.txt') in manifest + assert os.path.join('sdist_test', 'b.txt') in manifest + assert os.path.join('sdist_test', 'c.rst') not in manifest + + + def test_defaults_case_sensitivity(self): + """ + Make sure default files (README.*, etc.) are added in a case-sensitive + way to avoid problems with packages built on Windows. + """ + + open(os.path.join(self.temp_dir, 'readme.rst'), 'w').close() + open(os.path.join(self.temp_dir, 'SETUP.cfg'), 'w').close() + + dist = Distribution(SETUP_ATTRS) + # the extension deliberately capitalized for this test + # to make sure the actual filename (not capitalized) gets added + # to the manifest + dist.script_name = 'setup.PY' + cmd = sdist(dist) + cmd.ensure_finalized() + + with quiet(): + cmd.run() + + # lowercase all names so we can test in a case-insensitive way to make sure the files are not included + manifest = map(lambda x: x.lower(), cmd.filelist.files) + assert 'readme.rst' not in manifest, manifest + assert 'setup.py' not in manifest, manifest + assert 'setup.cfg' not in manifest, manifest def test_manifest_is_written_with_utf8_encoding(self): # Test for #303. @@ -135,34 +158,31 @@ class TestSdistTest(unittest.TestCase): # UTF-8 filename filename = os.path.join('sdist_test', 'smörbröd.py') + # Must create the file or it will get stripped. 
+ open(filename, 'w').close() + # Add UTF-8 filename and write manifest - quiet() - try: + with quiet(): mm.run() - mm.filelist.files.append(filename) + mm.filelist.append(filename) mm.write_manifest() - finally: - unquiet() manifest = open(mm.manifest, 'rbU') contents = manifest.read() manifest.close() # The manifest should be UTF-8 encoded - try: - u_contents = contents.decode('UTF-8') - except UnicodeDecodeError: - e = sys.exc_info()[1] - self.fail(e) + u_contents = contents.decode('UTF-8') # The manifest should contain the UTF-8 filename - if sys.version_info >= (3,): - self.assertTrue(posix(filename) in u_contents) - else: - self.assertTrue(posix(filename) in contents) + if PY2: + fs_enc = sys.getfilesystemencoding() + filename = filename.decode(fs_enc) + + assert posix(filename) in u_contents # Python 3 only - if sys.version_info >= (3,): + if PY3: def test_write_manifest_allows_utf8_filenames(self): # Test for #303. @@ -175,36 +195,37 @@ class TestSdistTest(unittest.TestCase): # UTF-8 filename filename = os.path.join(b('sdist_test'), b('smörbröd.py')) + # Must touch the file or risk removal + open(filename, "w").close() + # Add filename and write manifest - quiet() - try: + with quiet(): mm.run() u_filename = filename.decode('utf-8') mm.filelist.files.append(u_filename) # Re-write manifest mm.write_manifest() - finally: - unquiet() manifest = open(mm.manifest, 'rbU') contents = manifest.read() manifest.close() # The manifest should be UTF-8 encoded - try: - contents.decode('UTF-8') - except UnicodeDecodeError: - e = sys.exc_info()[1] - self.fail(e) + contents.decode('UTF-8') # The manifest should contain the UTF-8 filename - self.assertTrue(posix(filename) in contents) + assert posix(filename) in contents # The filelist should have been updated as well - self.assertTrue(u_filename in mm.filelist.files) + assert u_filename in mm.filelist.files def test_write_manifest_skips_non_utf8_filenames(self): - # Test for #303. + """ + Files that cannot be encoded to UTF-8 (specifically, those that + weren't originally successfully decoded and have surrogate + escapes) should be omitted from the manifest. + See https://bitbucket.org/tarek/distribute/issue/303 for history. + """ dist = Distribution(SETUP_ATTRS) dist.script_name = 'setup.py' mm = manifest_maker(dist) @@ -215,32 +236,25 @@ class TestSdistTest(unittest.TestCase): filename = os.path.join(b('sdist_test'), LATIN1_FILENAME) # Add filename with surrogates and write manifest - quiet() - try: + with quiet(): mm.run() u_filename = filename.decode('utf-8', 'surrogateescape') - mm.filelist.files.append(u_filename) + mm.filelist.append(u_filename) # Re-write manifest mm.write_manifest() - finally: - unquiet() manifest = open(mm.manifest, 'rbU') contents = manifest.read() manifest.close() # The manifest should be UTF-8 encoded - try: - contents.decode('UTF-8') - except UnicodeDecodeError: - e = sys.exc_info()[1] - self.fail(e) + contents.decode('UTF-8') # The Latin-1 filename should have been skipped - self.assertFalse(posix(filename) in contents) + assert posix(filename) not in contents # The filelist should have been updated as well - self.assertFalse(u_filename in mm.filelist.files) + assert u_filename not in mm.filelist.files def test_manifest_is_read_with_utf8_encoding(self): # Test for #303. 
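A minimal standalone sketch (not part of the patch; Python 3 semantics, with an illustrative byte value) of the surrogate-escape behaviour that test_write_manifest_skips_non_utf8_filenames describes: a byte name that could not be decoded as UTF-8 keeps lone surrogates, so a strict UTF-8 encode fails and write_manifest drops it.

raw = b'sm\xf6rbr\xf6d.py'                      # Latin-1 bytes, not valid UTF-8
name = raw.decode('utf-8', 'surrogateescape')   # keeps 0xf6 as lone surrogate U+DCF6
try:
    name.encode('utf-8')                        # strict encoding rejects lone surrogates
except UnicodeEncodeError:
    print('omitted from the manifest')          # mirrors the skip asserted above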
@@ -250,17 +264,14 @@ class TestSdistTest(unittest.TestCase): cmd.ensure_finalized() # Create manifest - quiet() - try: + with quiet(): cmd.run() - finally: - unquiet() # Add UTF-8 filename to manifest filename = os.path.join(b('sdist_test'), b('smörbröd.py')) cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') manifest = open(cmd.manifest, 'ab') - manifest.write(b('\n')+filename) + manifest.write(b('\n') + filename) manifest.close() # The file must exist to be included in the filelist @@ -268,19 +279,16 @@ class TestSdistTest(unittest.TestCase): # Re-read manifest cmd.filelist.files = [] - quiet() - try: + with quiet(): cmd.read_manifest() - finally: - unquiet() # The filelist should contain the UTF-8 filename - if sys.version_info >= (3,): + if PY3: filename = filename.decode('utf-8') - self.assertTrue(filename in cmd.filelist.files) + assert filename in cmd.filelist.files # Python 3 only - if sys.version_info >= (3,): + if PY3: def test_read_manifest_skips_non_utf8_filenames(self): # Test for #303. @@ -290,17 +298,14 @@ class TestSdistTest(unittest.TestCase): cmd.ensure_finalized() # Create manifest - quiet() - try: + with quiet(): cmd.run() - finally: - unquiet() # Add Latin-1 filename to manifest filename = os.path.join(b('sdist_test'), LATIN1_FILENAME) cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') manifest = open(cmd.manifest, 'ab') - manifest.write(b('\n')+filename) + manifest.write(b('\n') + filename) manifest.close() # The file must exist to be included in the filelist @@ -308,22 +313,16 @@ class TestSdistTest(unittest.TestCase): # Re-read manifest cmd.filelist.files = [] - quiet() - try: - try: - cmd.read_manifest() - except UnicodeDecodeError: - e = sys.exc_info()[1] - self.fail(e) - finally: - unquiet() + with quiet(): + cmd.read_manifest() # The Latin-1 filename should have been skipped filename = filename.decode('latin-1') - self.assertFalse(filename in cmd.filelist.files) + assert filename not in cmd.filelist.files - @skipIf(sys.version_info >= (3,) and locale.getpreferredencoding() != 'UTF-8', - 'Unittest fails if locale is not utf-8 but the manifests is recorded correctly') + @pytest.mark.skipif(PY3 and locale.getpreferredencoding() != 'UTF-8', + reason='Unittest fails if locale is not utf-8 but the manifests is ' + 'recorded correctly') def test_sdist_with_utf8_encoded_filename(self): # Test for #303. dist = Distribution(SETUP_ATTRS) @@ -335,31 +334,28 @@ class TestSdistTest(unittest.TestCase): filename = os.path.join(b('sdist_test'), b('smörbröd.py')) open(filename, 'w').close() - quiet() - try: + with quiet(): cmd.run() - finally: - unquiet() if sys.platform == 'darwin': filename = decompose(filename) - if sys.version_info >= (3,): + if PY3: fs_enc = sys.getfilesystemencoding() if sys.platform == 'win32': if fs_enc == 'cp1252': # Python 3 mangles the UTF-8 filename filename = filename.decode('cp1252') - self.assertTrue(filename in cmd.filelist.files) + assert filename in cmd.filelist.files else: filename = filename.decode('mbcs') - self.assertTrue(filename in cmd.filelist.files) + assert filename in cmd.filelist.files else: filename = filename.decode('utf-8') - self.assertTrue(filename in cmd.filelist.files) + assert filename in cmd.filelist.files else: - self.assertTrue(filename in cmd.filelist.files) + assert filename in cmd.filelist.files def test_sdist_with_latin1_encoded_filename(self): # Test for #303. 
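A short illustration (assumes Python 3; the filename is just an example) of why the darwin branch above runs the name through decompose(): HFS+ reports filenames in decomposed (NFD) UTF-8, so the composed name the test writes comes back as 'o' plus a combining diaeresis.

import unicodedata

nfc = 'smörbröd.py'                                  # composed form written by the test
nfd = unicodedata.normalize('NFD', nfc)              # form reported by an HFS+ volume
assert nfc != nfd                                    # different code points ...
assert nfc.encode('utf-8') != nfd.encode('utf-8')    # ... and different UTF-8 bytes
assert unicodedata.normalize('NFC', nfd) == nfc      # but the same logical name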
@@ -371,16 +367,13 @@ class TestSdistTest(unittest.TestCase):
         # Latin-1 filename
         filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)
         open(filename, 'w').close()
-        self.assertTrue(os.path.isfile(filename))
+        assert os.path.isfile(filename)
 
-        quiet()
-        try:
+        with quiet():
             cmd.run()
-        finally:
-            unquiet()
 
-        if sys.version_info >= (3,):
-            #not all windows systems have a default FS encoding of cp1252
+        if PY3:
+            # not all windows systems have a default FS encoding of cp1252
             if sys.platform == 'win32':
                 # Latin-1 is similar to Windows-1252 however
                 # on mbcs filesys it is not in latin-1 encoding
@@ -390,146 +383,37 @@ class TestSdistTest(unittest.TestCase):
                 else:
                     filename = filename.decode('latin-1')
 
-                self.assertTrue(filename in cmd.filelist.files)
+                assert filename in cmd.filelist.files
             else:
                 # The Latin-1 filename should have been skipped
                 filename = filename.decode('latin-1')
-                self.assertFalse(filename in cmd.filelist.files)
+                assert filename not in cmd.filelist.files
         else:
-            # No conversion takes place under Python 2 and the file
-            # is included. We shall keep it that way for BBB.
-            self.assertTrue(filename in cmd.filelist.files)
+            # Under Python 2 the filelist appears to keep the undecoded byte
+            # string; the decoding and encoding done to produce a UTF-8
+            # manifest may exclude the Latin-1 name.
+            try:
+                # fs_enc should match how the decoding is expected to be
+                # performed for the manifest output.
+                fs_enc = sys.getfilesystemencoding()
+                filename.decode(fs_enc)
+                assert filename in cmd.filelist.files
+            except UnicodeDecodeError:
+                assert filename not in cmd.filelist.files
 
 
-class TestDummyOutput(environment.ZippedEnvironment):
+def test_default_revctrl():
+    """
+    When _default_revctrl was removed from the `setuptools.command.sdist`
+    module in 10.0, it broke some systems which keep an old install of
+    setuptools (Distribute) around. Those old versions require that the
+    setuptools package continue to implement that interface, so this
+    function provides that interface, stubbed. See #320 for details.
- def setUp(self): - self.datafile = os.path.join('setuptools', 'tests', - 'svn_data', "dummy.zip") - self.dataname = "dummy" - super(TestDummyOutput, self).setUp() - - def _run(self): - code, data = environment.run_setup_py(["sdist"], - pypath=self.old_cwd, - data_stream=0) - if code: - info = "DIR: " + os.path.abspath('.') - info += "\n SDIST RETURNED: %i\n\n" % code - info += data - raise AssertionError(info) - - datalines = data.splitlines() - - possible = ( - "running sdist", - "running egg_info", - "creating dummy\.egg-info", - "writing dummy\.egg-info", - "writing top-level names to dummy\.egg-info", - "writing dependency_links to dummy\.egg-info", - "writing manifest file 'dummy\.egg-info", - "reading manifest file 'dummy\.egg-info", - "reading manifest template 'MANIFEST\.in'", - "writing manifest file 'dummy\.egg-info", - "creating dummy-0.1.1", - "making hard links in dummy-0\.1\.1", - "copying files to dummy-0\.1\.1", - "copying \S+ -> dummy-0\.1\.1", - "copying dummy", - "copying dummy\.egg-info", - "hard linking \S+ -> dummy-0\.1\.1", - "hard linking dummy", - "hard linking dummy\.egg-info", - "Writing dummy-0\.1\.1", - "creating dist", - "creating 'dist", - "Creating tar archive", - "running check", - "adding 'dummy-0\.1\.1", - "tar .+ dist/dummy-0\.1\.1\.tar dummy-0\.1\.1", - "gzip .+ dist/dummy-0\.1\.1\.tar", - "removing 'dummy-0\.1\.1' \\(and everything under it\\)", - ) - - print(" DIR: " + os.path.abspath('.')) - for line in datalines: - found = False - for pattern in possible: - if re.match(pattern, line): - print(" READ: " + line) - found = True - break - if not found: - raise AssertionError("Unexpexected: %s\n-in-\n%s" - % (line, data)) - - return data - - def test_sources(self): - self._run() - - -class TestSvn(environment.ZippedEnvironment): - - def setUp(self): - version = svn_utils.SvnInfo.get_svn_version() - if not version: # None or Empty - return - - self.base_version = tuple([int(x) for x in version.split('.')][:2]) - - if not self.base_version: - raise ValueError('No SVN tools installed') - elif self.base_version < (1, 3): - raise ValueError('Insufficient SVN Version %s' % version) - elif self.base_version >= (1, 9): - #trying the latest version - self.base_version = (1, 8) - - self.dataname = "svn%i%i_example" % self.base_version - self.datafile = os.path.join('setuptools', 'tests', - 'svn_data', self.dataname + ".zip") - super(TestSvn, self).setUp() - - @skipIf(not test_svn._svn_check, "No SVN to text, in the first place") - def test_walksvn(self): - if self.base_version >= (1, 6): - folder2 = 'third party2' - folder3 = 'third party3' - else: - folder2 = 'third_party2' - folder3 = 'third_party3' - - #TODO is this right - expected = set([ - os.path.join('a file'), - os.path.join(folder2, 'Changes.txt'), - os.path.join(folder2, 'MD5SUMS'), - os.path.join(folder2, 'README.txt'), - os.path.join(folder3, 'Changes.txt'), - os.path.join(folder3, 'MD5SUMS'), - os.path.join(folder3, 'README.txt'), - os.path.join(folder3, 'TODO.txt'), - os.path.join(folder3, 'fin'), - os.path.join('third_party', 'README.txt'), - os.path.join('folder', folder2, 'Changes.txt'), - os.path.join('folder', folder2, 'MD5SUMS'), - os.path.join('folder', folder2, 'WatashiNiYomimasu.txt'), - os.path.join('folder', folder3, 'Changes.txt'), - os.path.join('folder', folder3, 'fin'), - os.path.join('folder', folder3, 'MD5SUMS'), - os.path.join('folder', folder3, 'oops'), - os.path.join('folder', folder3, 'WatashiNiYomimasu.txt'), - os.path.join('folder', folder3, 'ZuMachen.txt'), - 
os.path.join('folder', 'third_party', 'WatashiNiYomimasu.txt'), - os.path.join('folder', 'lalala.txt'), - os.path.join('folder', 'quest.txt'), - # The example will have a deleted file - # (or should) but shouldn't return it - ]) - self.assertEqual(set(x for x in walk_revctrl()), expected) - - -def test_suite(): - return unittest.defaultTestLoader.loadTestsFromName(__name__) + This interface must be maintained until Ubuntu 12.04 is no longer + supported (by Setuptools). + """ + ep_def = 'svn_cvs = setuptools.command.sdist:_default_revctrl' + ep = pkg_resources.EntryPoint.parse(ep_def) + res = ep.resolve() + assert hasattr(res, '__iter__') diff --git a/awx/lib/site-packages/setuptools/tests/test_test.py b/awx/lib/site-packages/setuptools/tests/test_test.py index 7a06a40329..a66294c951 100644 --- a/awx/lib/site-packages/setuptools/tests/test_test.py +++ b/awx/lib/site-packages/setuptools/tests/test_test.py @@ -1,124 +1,91 @@ # -*- coding: UTF-8 -*- -"""develop tests -""" -import sys -import os, shutil, tempfile, unittest -import tempfile +from __future__ import unicode_literals + +import os import site -from distutils.errors import DistutilsError -from setuptools.compat import StringIO +import pytest + from setuptools.command.test import test -from setuptools.command import easy_install as easy_install_pkg from setuptools.dist import Distribution -SETUP_PY = """\ -from setuptools import setup +from .textwrap import DALS +from . import contexts -setup(name='foo', - packages=['name', 'name.space', 'name.space.tests'], - namespace_packages=['name'], - test_suite='name.space.tests.test_suite', -) -""" +SETUP_PY = DALS(""" + from setuptools import setup -NS_INIT = """# -*- coding: Latin-1 -*- -# Söme Arbiträry Ünicode to test Issüé 310 -try: - __import__('pkg_resources').declare_namespace(__name__) -except ImportError: - from pkgutil import extend_path - __path__ = extend_path(__path__, __name__) -""" -# Make sure this is Latin-1 binary, before writing: -if sys.version_info < (3,): - NS_INIT = NS_INIT.decode('UTF-8') -NS_INIT = NS_INIT.encode('Latin-1') + setup(name='foo', + packages=['name', 'name.space', 'name.space.tests'], + namespace_packages=['name'], + test_suite='name.space.tests.test_suite', + ) + """) -TEST_PY = """import unittest +NS_INIT = DALS(""" + # -*- coding: Latin-1 -*- + # Söme Arbiträry Ünicode to test Distribute Issüé 310 + try: + __import__('pkg_resources').declare_namespace(__name__) + except ImportError: + from pkgutil import extend_path + __path__ = extend_path(__path__, __name__) + """) -class TestTest(unittest.TestCase): - def test_test(self): - print "Foo" # Should fail under Python 3 unless 2to3 is used +TEST_PY = DALS(""" + import unittest -test_suite = unittest.makeSuite(TestTest) -""" + class TestTest(unittest.TestCase): + def test_test(self): + print "Foo" # Should fail under Python 3 unless 2to3 is used -class TestTestTest(unittest.TestCase): + test_suite = unittest.makeSuite(TestTest) + """) - def setUp(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix'): - return - # Directory structure - self.dir = tempfile.mkdtemp() - os.mkdir(os.path.join(self.dir, 'name')) - os.mkdir(os.path.join(self.dir, 'name', 'space')) - os.mkdir(os.path.join(self.dir, 'name', 'space', 'tests')) - # setup.py - setup = os.path.join(self.dir, 'setup.py') - f = open(setup, 'wt') +@pytest.fixture +def sample_test(tmpdir_cwd): + os.makedirs('name/space/tests') + + # setup.py + with open('setup.py', 'wt') as f: f.write(SETUP_PY) - f.close() - self.old_cwd = os.getcwd() - # 
name/__init__.py - init = os.path.join(self.dir, 'name', '__init__.py') - f = open(init, 'wb') - f.write(NS_INIT) - f.close() - # name/space/__init__.py - init = os.path.join(self.dir, 'name', 'space', '__init__.py') - f = open(init, 'wt') + + # name/__init__.py + with open('name/__init__.py', 'wb') as f: + f.write(NS_INIT.encode('Latin-1')) + + # name/space/__init__.py + with open('name/space/__init__.py', 'wt') as f: f.write('#empty\n') - f.close() - # name/space/tests/__init__.py - init = os.path.join(self.dir, 'name', 'space', 'tests', '__init__.py') - f = open(init, 'wt') + + # name/space/tests/__init__.py + with open('name/space/tests/__init__.py', 'wt') as f: f.write(TEST_PY) - f.close() - os.chdir(self.dir) - self.old_base = site.USER_BASE - site.USER_BASE = tempfile.mkdtemp() - self.old_site = site.USER_SITE - site.USER_SITE = tempfile.mkdtemp() - def tearDown(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix'): - return - - os.chdir(self.old_cwd) - shutil.rmtree(self.dir) - shutil.rmtree(site.USER_BASE) - shutil.rmtree(site.USER_SITE) - site.USER_BASE = self.old_base - site.USER_SITE = self.old_site +@pytest.mark.skipif('hasattr(sys, "real_prefix")') +@pytest.mark.usefixtures('user_override') +@pytest.mark.usefixtures('sample_test') +class TestTestTest: def test_test(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix'): - return - - dist = Distribution(dict( + params = dict( name='foo', packages=['name', 'name.space', 'name.space.tests'], namespace_packages=['name'], test_suite='name.space.tests.test_suite', use_2to3=True, - )) + ) + dist = Distribution(params) dist.script_name = 'setup.py' cmd = test(dist) cmd.user = 1 cmd.ensure_finalized() cmd.install_dir = site.USER_SITE cmd.user = 1 - old_stdout = sys.stdout - sys.stdout = StringIO() - try: - try: # try/except/finally doesn't work in Python 2.4, so we need nested try-statements. + with contexts.quiet(): + # The test runner calls sys.exit + with contexts.suppress_exceptions(SystemExit): cmd.run() - except SystemExit: # The test runner calls sys.exit, stop that making an error. - pass - finally: - sys.stdout = old_stdout - diff --git a/awx/lib/site-packages/setuptools/tests/test_upload_docs.py b/awx/lib/site-packages/setuptools/tests/test_upload_docs.py index 769f16cc5a..cc71cadb23 100644 --- a/awx/lib/site-packages/setuptools/tests/test_upload_docs.py +++ b/awx/lib/site-packages/setuptools/tests/test_upload_docs.py @@ -1,72 +1,59 @@ -"""build_ext tests -""" -import sys, os, shutil, tempfile, unittest, site, zipfile +import os +import zipfile +import contextlib + +import pytest + from setuptools.command.upload_docs import upload_docs from setuptools.dist import Distribution -SETUP_PY = """\ -from setuptools import setup +from .textwrap import DALS +from . import contexts -setup(name='foo') -""" -class TestUploadDocsTest(unittest.TestCase): - def setUp(self): - self.dir = tempfile.mkdtemp() - setup = os.path.join(self.dir, 'setup.py') - f = open(setup, 'w') +SETUP_PY = DALS( + """ + from setuptools import setup + + setup(name='foo') + """) + + +@pytest.fixture +def sample_project(tmpdir_cwd): + # setup.py + with open('setup.py', 'wt') as f: f.write(SETUP_PY) - f.close() - self.old_cwd = os.getcwd() - os.chdir(self.dir) - self.upload_dir = os.path.join(self.dir, 'build') - os.mkdir(self.upload_dir) + os.mkdir('build') - # A test document. - f = open(os.path.join(self.upload_dir, 'index.html'), 'w') + # A test document. 
+    with open('build/index.html', 'w') as f:
         f.write("Hello world.")
-        f.close()
 
-        # An empty folder.
-        os.mkdir(os.path.join(self.upload_dir, 'empty'))
+    # An empty folder.
+    os.mkdir('build/empty')
 
-        if sys.version >= "2.6":
-            self.old_base = site.USER_BASE
-            site.USER_BASE = upload_docs.USER_BASE = tempfile.mkdtemp()
-            self.old_site = site.USER_SITE
-            site.USER_SITE = upload_docs.USER_SITE = tempfile.mkdtemp()
 
-    def tearDown(self):
-        os.chdir(self.old_cwd)
-        shutil.rmtree(self.dir)
-        if sys.version >= "2.6":
-            shutil.rmtree(site.USER_BASE)
-            shutil.rmtree(site.USER_SITE)
-            site.USER_BASE = self.old_base
-            site.USER_SITE = self.old_site
 
+@pytest.mark.usefixtures('sample_project')
+@pytest.mark.usefixtures('user_override')
+class TestUploadDocsTest:
     def test_create_zipfile(self):
-        # Test to make sure zipfile creation handles common cases.
-        # This explicitly includes a folder containing an empty folder.
+        """
+        Ensure zipfile creation handles common cases, including a folder
+        containing an empty folder.
+        """
         dist = Distribution()
 
         cmd = upload_docs(dist)
-        cmd.upload_dir = self.upload_dir
-        cmd.target_dir = self.upload_dir
-        tmp_dir = tempfile.mkdtemp()
-        tmp_file = os.path.join(tmp_dir, 'foo.zip')
-        try:
+        cmd.target_dir = cmd.upload_dir = 'build'
+        with contexts.tempdir() as tmp_dir:
+            tmp_file = os.path.join(tmp_dir, 'foo.zip')
             zip_file = cmd.create_zipfile(tmp_file)
 
             assert zipfile.is_zipfile(tmp_file)
 
-            zip_file = zipfile.ZipFile(tmp_file) # woh...
-
-            assert zip_file.namelist() == ['index.html']
-
-            zip_file.close()
-        finally:
-            shutil.rmtree(tmp_dir)
-
+            with contextlib.closing(zipfile.ZipFile(tmp_file)) as zip_file:
+                assert zip_file.namelist() == ['index.html']
diff --git a/awx/lib/site-packages/setuptools/tests/test_windows_wrappers.py b/awx/lib/site-packages/setuptools/tests/test_windows_wrappers.py
new file mode 100644
index 0000000000..5b14d07b0e
--- /dev/null
+++ b/awx/lib/site-packages/setuptools/tests/test_windows_wrappers.py
@@ -0,0 +1,183 @@
+"""
+Python Script Wrapper for Windows
+=================================
+
+setuptools includes wrappers for Python scripts that allow them to be
+executed like regular Windows programs. There are 2 wrappers, one
+for command-line programs, cli.exe, and one for graphical programs,
+gui.exe. These programs are almost identical, function pretty much
+the same way, and are generated from the same source file. The
+wrapper programs are used by copying them to the directory containing
+the script they are to wrap, giving them the same name as the script
+they wrap.
+"""
+
+from __future__ import absolute_import
+
+import sys
+import textwrap
+import subprocess
+
+import pytest
+
+from setuptools.command.easy_install import nt_quote_arg
+import pkg_resources
+
+
+pytestmark = pytest.mark.skipif(sys.platform != 'win32', reason="Windows only")
+
+
+class WrapperTester:
+
+    @classmethod
+    def prep_script(cls, template):
+        python_exe = nt_quote_arg(sys.executable)
+        return template % locals()
+
+    @classmethod
+    def create_script(cls, tmpdir):
+        """
+        Create a simple script, foo-script.py
+
+        Note that the script starts with a Unix-style '#!' line saying which
+        Python executable to run. The wrapper will use this line to find the
+        correct Python executable.
+        """
+
+        script = cls.prep_script(cls.script_tmpl)
+
+        with (tmpdir / cls.script_name).open('w') as f:
+            f.write(script)
+
+        # also copy cli.exe to the sample directory
+        with (tmpdir / cls.wrapper_name).open('wb') as f:
+            w = pkg_resources.resource_string('setuptools', cls.wrapper_source)
+            f.write(w)
+
+
+class TestCLI(WrapperTester):
+    script_name = 'foo-script.py'
+    wrapper_source = 'cli-32.exe'
+    wrapper_name = 'foo.exe'
+    script_tmpl = textwrap.dedent("""
+        #!%(python_exe)s
+        import sys
+        input = repr(sys.stdin.read())
+        print(sys.argv[0][-14:])
+        print(sys.argv[1:])
+        print(input)
+        if __debug__:
+            print('non-optimized')
+        """).lstrip()
+
+    def test_basic(self, tmpdir):
+        """
+        When the copy of cli.exe, foo.exe in this example, runs, it examines
+        the path name it was run with and computes a Python script path name
+        by removing the '.exe' suffix and adding the '-script.py' suffix. (For
+        GUI programs, the suffix '-script.pyw' is added.) This is why we
+        named our script the way we did. Now we can run our script by running
+        the wrapper:
+
+        This example is a little pathological in that it exercises Windows
+        (MS C runtime) quoting rules:
+
+        - Strings containing spaces are surrounded by double quotes.
+
+        - Double quotes in strings need to be escaped by preceding them with
+          back slashes.
+
+        - One or more backslashes preceding double quotes need to be escaped
+          by preceding each of them with back slashes.
+        """
+        self.create_script(tmpdir)
+        cmd = [
+            str(tmpdir / 'foo.exe'),
+            'arg1',
+            'arg 2',
+            'arg "2\\"',
+            'arg 4\\',
+            'arg5 a\\\\b',
+        ]
+        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
+        stdout, stderr = proc.communicate('hello\nworld\n'.encode('ascii'))
+        actual = stdout.decode('ascii').replace('\r\n', '\n')
+        expected = textwrap.dedent(r"""
+            \foo-script.py
+            ['arg1', 'arg 2', 'arg "2\\"', 'arg 4\\', 'arg5 a\\\\b']
+            'hello\nworld\n'
+            non-optimized
+            """).lstrip()
+        assert actual == expected
+
+    def test_with_options(self, tmpdir):
+        """
+        Specifying Python Command-line Options
+        --------------------------------------
+
+        You can specify a single argument on the '#!' line. This can be used
+        to specify Python options like -O, to run in optimized mode or -i
+        to start the interactive interpreter. You can combine multiple
+        options as usual. For example, to run in optimized mode and
For example, to run in optimized mode and + enter the interpreter after running the script, you could use -Oi: + """ + self.create_script(tmpdir) + tmpl = textwrap.dedent(""" + #!%(python_exe)s -Oi + import sys + input = repr(sys.stdin.read()) + print(sys.argv[0][-14:]) + print(sys.argv[1:]) + print(input) + if __debug__: + print('non-optimized') + sys.ps1 = '---' + """).lstrip() + with (tmpdir / 'foo-script.py').open('w') as f: + f.write(self.prep_script(tmpl)) + cmd = [str(tmpdir / 'foo.exe')] + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT) + stdout, stderr = proc.communicate() + actual = stdout.decode('ascii').replace('\r\n', '\n') + expected = textwrap.dedent(r""" + \foo-script.py + [] + '' + --- + """).lstrip() + assert actual == expected + + +class TestGUI(WrapperTester): + """ + Testing the GUI Version + ----------------------- + """ + script_name = 'bar-script.pyw' + wrapper_source = 'gui-32.exe' + wrapper_name = 'bar.exe' + + script_tmpl = textwrap.dedent(""" + #!%(python_exe)s + import sys + f = open(sys.argv[1], 'wb') + bytes_written = f.write(repr(sys.argv[2]).encode('utf-8')) + f.close() + """).strip() + + def test_basic(self, tmpdir): + """Test the GUI version with the simple scipt, bar-script.py""" + self.create_script(tmpdir) + + cmd = [ + str(tmpdir / 'bar.exe'), + str(tmpdir / 'test_output.txt'), + 'Test Argument', + ] + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT) + stdout, stderr = proc.communicate() + assert not stdout + assert not stderr + with (tmpdir / 'test_output.txt').open('rb') as f_out: + actual = f_out.read().decode('ascii') + assert actual == repr('Test Argument') diff --git a/awx/lib/site-packages/setuptools/tests/textwrap.py b/awx/lib/site-packages/setuptools/tests/textwrap.py new file mode 100644 index 0000000000..5cd9e5bca8 --- /dev/null +++ b/awx/lib/site-packages/setuptools/tests/textwrap.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import + +import textwrap + + +def DALS(s): + "dedent and left-strip" + return textwrap.dedent(s).lstrip() diff --git a/awx/lib/site-packages/setuptools/unicode_utils.py b/awx/lib/site-packages/setuptools/unicode_utils.py new file mode 100644 index 0000000000..d2de941a69 --- /dev/null +++ b/awx/lib/site-packages/setuptools/unicode_utils.py @@ -0,0 +1,41 @@ +import unicodedata +import sys +from setuptools.compat import unicode as decoded_string + + +# HFS Plus uses decomposed UTF-8 +def decompose(path): + if isinstance(path, decoded_string): + return unicodedata.normalize('NFD', path) + try: + path = path.decode('utf-8') + path = unicodedata.normalize('NFD', path) + path = path.encode('utf-8') + except UnicodeError: + pass # Not UTF-8 + return path + + +def filesys_decode(path): + """ + Ensure that the given path is decoded, + NONE when no expected encoding works + """ + + fs_enc = sys.getfilesystemencoding() + if isinstance(path, decoded_string): + return path + + for enc in (fs_enc, "utf-8"): + try: + return path.decode(enc) + except UnicodeDecodeError: + continue + + +def try_encode(string, enc): + "turn unicode encoding into a functional routine" + try: + return string.encode(enc) + except UnicodeEncodeError: + return None diff --git a/awx/lib/site-packages/setuptools/utils.py b/awx/lib/site-packages/setuptools/utils.py new file mode 100644 index 0000000000..91e4b87f65 --- /dev/null +++ b/awx/lib/site-packages/setuptools/utils.py @@ -0,0 +1,11 @@ +import os +import os.path + + +def 
cs_path_exists(fspath): + if not os.path.exists(fspath): + return False + # make absolute so we always have a directory + abspath = os.path.abspath(fspath) + directory, filename = os.path.split(abspath) + return filename in os.listdir(directory) \ No newline at end of file diff --git a/awx/lib/site-packages/setuptools/version.py b/awx/lib/site-packages/setuptools/version.py index 2b9ccf1770..8bb69c5f6d 100644 --- a/awx/lib/site-packages/setuptools/version.py +++ b/awx/lib/site-packages/setuptools/version.py @@ -1 +1 @@ -__version__ = '2.2' +__version__ = '12.0.5' diff --git a/awx/lib/site-packages/setuptools/windows_support.py b/awx/lib/site-packages/setuptools/windows_support.py new file mode 100644 index 0000000000..cb977cff95 --- /dev/null +++ b/awx/lib/site-packages/setuptools/windows_support.py @@ -0,0 +1,29 @@ +import platform +import ctypes + + +def windows_only(func): + if platform.system() != 'Windows': + return lambda *args, **kwargs: None + return func + + +@windows_only +def hide_file(path): + """ + Set the hidden attribute on a file or directory. + + From http://stackoverflow.com/questions/19622133/ + + `path` must be text. + """ + __import__('ctypes.wintypes') + SetFileAttributes = ctypes.windll.kernel32.SetFileAttributesW + SetFileAttributes.argtypes = ctypes.wintypes.LPWSTR, ctypes.wintypes.DWORD + SetFileAttributes.restype = ctypes.wintypes.BOOL + + FILE_ATTRIBUTE_HIDDEN = 0x02 + + ret = SetFileAttributes(path, FILE_ATTRIBUTE_HIDDEN) + if not ret: + raise ctypes.WinError()
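A small usage sketch for hide_file above (the directory name is made up for illustration; per the docstring the path should be text). On non-Windows platforms the windows_only decorator turns the call into a no-op, so the snippet is safe to run anywhere.

import os
import tempfile

from setuptools.windows_support import hide_file

base = tempfile.mkdtemp()
egg_info = os.path.join(base, u'example.egg-info')   # hypothetical directory
os.mkdir(egg_info)
hide_file(egg_info)   # sets FILE_ATTRIBUTE_HIDDEN on Windows, no-op elsewhere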
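Likewise, a sketch of what the filesys_decode helper from setuptools/unicode_utils.py above returns for byte paths (byte values chosen for illustration; the second result depends on the filesystem encoding).

from setuptools.unicode_utils import filesys_decode

print(filesys_decode(b'sm\xc3\xb6rbr\xc3\xb6d.py'))  # u'smörbröd.py' (valid UTF-8)
print(filesys_decode(b'sm\xf6rbr\xf6d.py'))          # None on a UTF-8 filesystem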