Upgrading setuptools (and related modules) to 12.0.5

Matthew Jones 2015-01-29 12:28:44 -05:00
parent d2c46d015b
commit 6abca24c12
79 changed files with 7450 additions and 3578 deletions

View File

@@ -54,7 +54,7 @@ rackspace-novaclient==1.4 (no files)
rax-default-network-flags-python-novaclient-ext==0.2.3 (rax_default_network_flags_python_novaclient_ext/*)
rax-scheduled-images-python-novaclient-ext==0.2.1 (rax_scheduled_images_python_novaclient_ext/*)
requests==2.3.0 (requests/*)
setuptools==2.2 (setuptools/*, _markerlib/*, pkg_resources.py, easy_install.py, excluded bin/easy_install*)
setuptools==12.0.5 (setuptools/*, _markerlib/*, pkg_resources/*, easy_install.py)
simplejson==3.6.0 (simplejson/*, excluded simplejson/_speedups.so)
six==1.7.3 (six.py)
South==0.8.4 (south/*)

View File

@@ -0,0 +1,31 @@
# Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
__title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"
__version__ = "15.0"
__author__ = "Donald Stufft"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2014 %s" % __author__

View File

@@ -0,0 +1,24 @@
# Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from .__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
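
Taken together, the two modules above simply re-export the distribution metadata at the package level. A minimal sketch of what that buys a caller, assuming the vendored package is importable as ``packaging``:

import packaging

# __init__ re-exports the dunder attributes defined in __about__.
assert packaging.__version__ == "15.0"
assert packaging.__title__ == "packaging"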

View File

@@ -0,0 +1,40 @@
# Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
import sys
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
# flake8: noqa
if PY3:
string_types = str,
else:
string_types = basestring,
def with_metaclass(meta, *bases):
"""
Create a base class with a metaclass.
"""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(meta):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
return type.__new__(metaclass, 'temporary_class', (), {})
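
A brief usage sketch of ``with_metaclass`` (the class names below are hypothetical): it lets a base class carry ``abc.ABCMeta`` with one syntax that works on both Python 2 and Python 3:

import abc

class BaseCommand(with_metaclass(abc.ABCMeta, object)):
    # Hypothetical example class; with_metaclass is the helper defined above.
    @abc.abstractmethod
    def run(self):
        """Subclasses must implement run()."""

class EchoCommand(BaseCommand):
    def run(self):
        return "ok"

assert EchoCommand().run() == "ok"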

View File

@@ -0,0 +1,78 @@
# Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
class Infinity(object):
def __repr__(self):
return "Infinity"
def __hash__(self):
return hash(repr(self))
def __lt__(self, other):
return False
def __le__(self, other):
return False
def __eq__(self, other):
return isinstance(other, self.__class__)
def __ne__(self, other):
return not isinstance(other, self.__class__)
def __gt__(self, other):
return True
def __ge__(self, other):
return True
def __neg__(self):
return NegativeInfinity
Infinity = Infinity()
class NegativeInfinity(object):
def __repr__(self):
return "-Infinity"
def __hash__(self):
return hash(repr(self))
def __lt__(self, other):
return True
def __le__(self, other):
return True
def __eq__(self, other):
return isinstance(other, self.__class__)
def __ne__(self, other):
return not isinstance(other, self.__class__)
def __gt__(self, other):
return False
def __ge__(self, other):
return False
def __neg__(self):
return Infinity
NegativeInfinity = NegativeInfinity()
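
These two singletons act as sentinels in the version sort keys built elsewhere in this vendored ``packaging`` code: ``Infinity`` compares greater than everything else, ``NegativeInfinity`` less than everything else. A quick sketch of the intended behaviour (the concrete values are illustrative):

# Both sentinels compare against arbitrary objects via their rich comparisons.
assert Infinity > 10 ** 9
assert NegativeInfinity < 0
assert -Infinity is NegativeInfinity

# Mixed into an ordinary sort they bracket all other values.
assert sorted([3, Infinity, NegativeInfinity, 1]) == [NegativeInfinity, 1, 3, Infinity]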

View File

@@ -0,0 +1,772 @@
# Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
import abc
import functools
import itertools
import re
from ._compat import string_types, with_metaclass
from .version import Version, LegacyVersion, parse
class InvalidSpecifier(ValueError):
"""
An invalid specifier was found, users should refer to PEP 440.
"""
class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
@abc.abstractmethod
def __str__(self):
"""
Returns the str representation of this Specifier like object. This
should be representative of the Specifier itself.
"""
@abc.abstractmethod
def __hash__(self):
"""
Returns a hash value for this Specifier like object.
"""
@abc.abstractmethod
def __eq__(self, other):
"""
Returns a boolean representing whether or not the two Specifier like
objects are equal.
"""
@abc.abstractmethod
def __ne__(self, other):
"""
Returns a boolean representing whether or not the two Specifier like
objects are not equal.
"""
@abc.abstractproperty
def prereleases(self):
"""
Returns whether or not pre-releases as a whole are allowed by this
specifier.
"""
@prereleases.setter
def prereleases(self, value):
"""
Sets whether or not pre-releases as a whole are allowed by this
specifier.
"""
@abc.abstractmethod
def contains(self, item, prereleases=None):
"""
Determines if the given item is contained within this specifier.
"""
@abc.abstractmethod
def filter(self, iterable, prereleases=None):
"""
Takes an iterable of items and filters them so that only items which
are contained within this specifier are allowed in it.
"""
class _IndividualSpecifier(BaseSpecifier):
_operators = {}
def __init__(self, spec="", prereleases=None):
match = self._regex.search(spec)
if not match:
raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))
self._spec = (
match.group("operator").strip(),
match.group("version").strip(),
)
# Store whether or not this Specifier should accept prereleases
self._prereleases = prereleases
def __repr__(self):
pre = (
", prereleases={0!r}".format(self.prereleases)
if self._prereleases is not None
else ""
)
return "<{0}({1!r}{2})>".format(
self.__class__.__name__,
str(self),
pre,
)
def __str__(self):
return "{0}{1}".format(*self._spec)
def __hash__(self):
return hash(self._spec)
def __eq__(self, other):
if isinstance(other, string_types):
try:
other = self.__class__(other)
except InvalidSpecifier:
return NotImplemented
elif not isinstance(other, self.__class__):
return NotImplemented
return self._spec == other._spec
def __ne__(self, other):
if isinstance(other, string_types):
try:
other = self.__class__(other)
except InvalidSpecifier:
return NotImplemented
elif not isinstance(other, self.__class__):
return NotImplemented
return self._spec != other._spec
def _get_operator(self, op):
return getattr(self, "_compare_{0}".format(self._operators[op]))
def _coerce_version(self, version):
if not isinstance(version, (LegacyVersion, Version)):
version = parse(version)
return version
@property
def prereleases(self):
return self._prereleases
@prereleases.setter
def prereleases(self, value):
self._prereleases = value
def contains(self, item, prereleases=None):
# Determine if prereleases are to be allowed or not.
if prereleases is None:
prereleases = self.prereleases
# Normalize item to a Version or LegacyVersion. This allows us to have
# a shortcut for ``"2.0" in Specifier(">=2")``.
item = self._coerce_version(item)
# Determine if we should be supporting prereleases in this specifier
# or not. If we do not support prereleases, then we can short-circuit
# the logic if this version is a prerelease.
if item.is_prerelease and not prereleases:
return False
# Actually do the comparison to determine if this item is contained
# within this Specifier or not.
return self._get_operator(self._spec[0])(item, self._spec[1])
def filter(self, iterable, prereleases=None):
yielded = False
found_prereleases = []
kw = {"prereleases": prereleases if prereleases is not None else True}
# Attempt to iterate over all the values in the iterable and if any of
# them match, yield them.
for version in iterable:
parsed_version = self._coerce_version(version)
if self.contains(parsed_version, **kw):
# If our version is a prerelease, and we were not set to allow
# prereleases, then we'll store it for later in case nothing
# else matches this specifier.
if (parsed_version.is_prerelease
and not (prereleases or self.prereleases)):
found_prereleases.append(version)
# Either this is not a prerelease, or we should have been
# accepting prereleases from the beginning.
else:
yielded = True
yield version
# Now that we've iterated over everything, determine if we've yielded
# any values, and if we have not and we have any prereleases stored up
# then we will go ahead and yield the prereleases.
if not yielded and found_prereleases:
for version in found_prereleases:
yield version
class LegacySpecifier(_IndividualSpecifier):
_regex = re.compile(
r"""
^
\s*
(?P<operator>(==|!=|<=|>=|<|>))
\s*
(?P<version>
[^\s]* # We just match everything, except for whitespace since this
# is a "legacy" specifier and the version string can be just
# about anything.
)
\s*
$
""",
re.VERBOSE | re.IGNORECASE,
)
_operators = {
"==": "equal",
"!=": "not_equal",
"<=": "less_than_equal",
">=": "greater_than_equal",
"<": "less_than",
">": "greater_than",
}
def _coerce_version(self, version):
if not isinstance(version, LegacyVersion):
version = LegacyVersion(str(version))
return version
def _compare_equal(self, prospective, spec):
return prospective == self._coerce_version(spec)
def _compare_not_equal(self, prospective, spec):
return prospective != self._coerce_version(spec)
def _compare_less_than_equal(self, prospective, spec):
return prospective <= self._coerce_version(spec)
def _compare_greater_than_equal(self, prospective, spec):
return prospective >= self._coerce_version(spec)
def _compare_less_than(self, prospective, spec):
return prospective < self._coerce_version(spec)
def _compare_greater_than(self, prospective, spec):
return prospective > self._coerce_version(spec)
def _require_version_compare(fn):
@functools.wraps(fn)
def wrapped(self, prospective, spec):
if not isinstance(prospective, Version):
return False
return fn(self, prospective, spec)
return wrapped
class Specifier(_IndividualSpecifier):
_regex = re.compile(
r"""
^
\s*
(?P<operator>(~=|==|!=|<=|>=|<|>|===))
(?P<version>
(?:
# The identity operators allow for an escape hatch that will
# do an exact string match of the version you wish to install.
# This will not be parsed by PEP 440 and we cannot determine
# any semantic meaning from it. This operator is discouraged
# but included entirely as an escape hatch.
(?<====) # Only match for the identity operator
\s*
[^\s]* # We just match everything, except for whitespace
# since we are only testing for strict identity.
)
|
(?:
# The (non)equality operators allow for wild card and local
# versions to be specified so we have to define these two
# operators separately to enable that.
(?<===|!=) # Only match for equals and not equals
\s*
v?
(?:[0-9]+!)? # epoch
[0-9]+(?:\.[0-9]+)* # release
(?: # pre release
[-_\.]?
(a|b|c|rc|alpha|beta|pre|preview)
[-_\.]?
[0-9]*
)?
(?: # post release
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
)?
# You cannot use a wild card and a dev or local version
# together so group them with a | and make them optional.
(?:
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
(?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
|
\.\* # Wild card syntax of .*
)?
)
|
(?:
# The compatible operator requires at least two digits in the
# release segment.
(?<=~=) # Only match for the compatible operator
\s*
v?
(?:[0-9]+!)? # epoch
[0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *)
(?: # pre release
[-_\.]?
(a|b|c|rc|alpha|beta|pre|preview)
[-_\.]?
[0-9]*
)?
(?: # post release
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
)?
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
)
|
(?:
# All other operators only allow a subset of what the
# (non)equality operators do. Specifically they do not allow
# local versions to be specified nor do they allow the prefix
# matching wild cards.
(?<!==|!=|~=) # We have special cases for these
# operators so we want to make sure they
# don't match here.
\s*
v?
(?:[0-9]+!)? # epoch
[0-9]+(?:\.[0-9]+)* # release
(?: # pre release
[-_\.]?
(a|b|c|rc|alpha|beta|pre|preview)
[-_\.]?
[0-9]*
)?
(?: # post release
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
)?
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
)
)
\s*
$
""",
re.VERBOSE | re.IGNORECASE,
)
_operators = {
"~=": "compatible",
"==": "equal",
"!=": "not_equal",
"<=": "less_than_equal",
">=": "greater_than_equal",
"<": "less_than",
">": "greater_than",
"===": "arbitrary",
}
@_require_version_compare
def _compare_compatible(self, prospective, spec):
# Compatible releases have an equivalent combination of >= and ==. That
# is, ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
# implement this in terms of the other specifiers instead of
# implementing it ourselves. The only thing we need to do is construct
# the other specifiers.
# We want everything but the last item in the version, but we want to
# ignore post and dev releases and we want to treat the pre-release as
# its own separate segment.
prefix = ".".join(
list(
itertools.takewhile(
lambda x: (not x.startswith("post")
and not x.startswith("dev")),
_version_split(spec),
)
)[:-1]
)
# Add the prefix notation to the end of our string
prefix += ".*"
return (self._get_operator(">=")(prospective, spec)
and self._get_operator("==")(prospective, prefix))
@_require_version_compare
def _compare_equal(self, prospective, spec):
# We need special logic to handle prefix matching
if spec.endswith(".*"):
# Split the spec out by dots, and pretend that there is an implicit
# dot in between a release segment and a pre-release segment.
spec = _version_split(spec[:-2]) # Remove the trailing .*
# Split the prospective version out by dots, and pretend that there
# is an implicit dot in between a release segment and a pre-release
# segment.
prospective = _version_split(str(prospective))
# Shorten the prospective version to be the same length as the spec
# so that we can determine if the specifier is a prefix of the
# prospective version or not.
prospective = prospective[:len(spec)]
# Pad out our two sides with zeros so that they both equal the same
# length.
spec, prospective = _pad_version(spec, prospective)
else:
# Convert our spec string into a Version
spec = Version(spec)
# If the specifier does not have a local segment, then we want to
# act as if the prospective version also does not have a local
# segment.
if not spec.local:
prospective = Version(prospective.public)
return prospective == spec
@_require_version_compare
def _compare_not_equal(self, prospective, spec):
return not self._compare_equal(prospective, spec)
@_require_version_compare
def _compare_less_than_equal(self, prospective, spec):
return prospective <= Version(spec)
@_require_version_compare
def _compare_greater_than_equal(self, prospective, spec):
return prospective >= Version(spec)
@_require_version_compare
def _compare_less_than(self, prospective, spec):
# Convert our spec to a Version instance, since we'll want to work with
# it as a version.
spec = Version(spec)
# Check to see if the prospective version is less than the spec
# version. If it's not we can short circuit and just return False now
# instead of doing extra unneeded work.
if not prospective < spec:
return False
# This special case is here so that, unless the specifier itself
# includes a pre-release version, we do not accept pre-release
# versions for the version mentioned in the specifier (e.g. <3.1 should
# not match 3.1.dev0, but should match 3.0.dev0).
if not spec.is_prerelease and prospective.is_prerelease:
if Version(prospective.base_version) == Version(spec.base_version):
return False
# If we've gotten to here, it means that prospective version is both
# less than the spec version *and* it's not a pre-release of the same
# version in the spec.
return True
@_require_version_compare
def _compare_greater_than(self, prospective, spec):
# Convert our spec to a Version instance, since we'll want to work with
# it as a version.
spec = Version(spec)
# Check to see if the prospective version is greater than the spec
# version. If it's not we can short circuit and just return False now
# instead of doing extra unneeded work.
if not prospective > spec:
return False
# This special case is here so that, unless the specifier itself
# includes a post-release version, we do not accept
# post-release versions for the version mentioned in the specifier
# (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
if not spec.is_postrelease and prospective.is_postrelease:
if Version(prospective.base_version) == Version(spec.base_version):
return False
# Ensure that we do not allow a local version of the version mentioned
# in the specifier, which is technically greater than, to match.
if prospective.local is not None:
if Version(prospective.base_version) == Version(spec.base_version):
return False
# If we've gotten to here, it means that prospective version is both
# greater than the spec version *and* it's not a pre-release of the
# same version in the spec.
return True
def _compare_arbitrary(self, prospective, spec):
return str(prospective).lower() == str(spec).lower()
@property
def prereleases(self):
# If there is an explicit prereleases set for this, then we'll just
# blindly use that.
if self._prereleases is not None:
return self._prereleases
# Look at all of our specifiers and determine if they are inclusive
# operators, and if they are if they are including an explicit
# prerelease.
operator, version = self._spec
if operator in ["==", ">=", "<=", "~="]:
# The == specifier can include a trailing .*; if it does, we
# want to remove it before parsing.
if operator == "==" and version.endswith(".*"):
version = version[:-2]
# Parse the version, and if it is a pre-release then this
# specifier allows pre-releases.
if parse(version).is_prerelease:
return True
return False
@prereleases.setter
def prereleases(self, value):
self._prereleases = value
_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
def _version_split(version):
result = []
for item in version.split("."):
match = _prefix_regex.search(item)
if match:
result.extend(match.groups())
else:
result.append(item)
return result
def _pad_version(left, right):
left_split, right_split = [], []
# Get the release segment of our versions
left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
# Get the rest of our versions
left_split.append(left[len(left_split[0]):])
right_split.append(right[len(right_split[0]):])
# Insert our padding
left_split.insert(
1,
["0"] * max(0, len(right_split[0]) - len(left_split[0])),
)
right_split.insert(
1,
["0"] * max(0, len(left_split[0]) - len(right_split[0])),
)
return (
list(itertools.chain(*left_split)),
list(itertools.chain(*right_split)),
)
class SpecifierSet(BaseSpecifier):
def __init__(self, specifiers="", prereleases=None):
# Split on , to break each individual specifier into its own item, and
# strip each item to remove leading/trailing whitespace.
specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
# Parse each individual specifier, attempting first to make it a
# Specifier and falling back to a LegacySpecifier.
parsed = set()
for specifier in specifiers:
try:
parsed.add(Specifier(specifier))
except InvalidSpecifier:
parsed.add(LegacySpecifier(specifier))
# Turn our parsed specifiers into a frozen set and save them for later.
self._specs = frozenset(parsed)
# Store our prereleases value so we can use it later to determine if
# we accept prereleases or not.
self._prereleases = prereleases
def __repr__(self):
pre = (
", prereleases={0!r}".format(self.prereleases)
if self._prereleases is not None
else ""
)
return "<SpecifierSet({0!r}{1})>".format(str(self), pre)
def __str__(self):
return ",".join(sorted(str(s) for s in self._specs))
def __hash__(self):
return hash(self._specs)
def __and__(self, other):
if isinstance(other, string_types):
other = SpecifierSet(other)
elif not isinstance(other, SpecifierSet):
return NotImplemented
specifier = SpecifierSet()
specifier._specs = frozenset(self._specs | other._specs)
if self._prereleases is None and other._prereleases is not None:
specifier._prereleases = other._prereleases
elif self._prereleases is not None and other._prereleases is None:
specifier._prereleases = self._prereleases
elif self._prereleases == other._prereleases:
specifier._prereleases = self._prereleases
else:
raise ValueError(
"Cannot combine SpecifierSets with True and False prerelease "
"overrides."
)
return specifier
def __eq__(self, other):
if isinstance(other, string_types):
other = SpecifierSet(other)
elif isinstance(other, _IndividualSpecifier):
other = SpecifierSet(str(other))
elif not isinstance(other, SpecifierSet):
return NotImplemented
return self._specs == other._specs
def __ne__(self, other):
if isinstance(other, string_types):
other = SpecifierSet(other)
elif isinstance(other, _IndividualSpecifier):
other = SpecifierSet(str(other))
elif not isinstance(other, SpecifierSet):
return NotImplemented
return self._specs != other._specs
@property
def prereleases(self):
# If we have been given an explicit prerelease modifier, then we'll
# pass that through here.
if self._prereleases is not None:
return self._prereleases
# Otherwise we'll see if any of the given specifiers accept
# prereleases, if any of them do we'll return True, otherwise False.
# Note: The use of any() here means that an empty set of specifiers
# will always return False, this is an explicit design decision.
return any(s.prereleases for s in self._specs)
@prereleases.setter
def prereleases(self, value):
self._prereleases = value
def contains(self, item, prereleases=None):
# Ensure that our item is a Version or LegacyVersion instance.
if not isinstance(item, (LegacyVersion, Version)):
item = parse(item)
# We can determine if we're going to allow pre-releases by looking to
# see if any of the underlying items supports them. If none of them do
# and this item is a pre-release then we do not allow it and we can
# short circuit that here.
# Note: This means that 1.0.dev1 would not be contained in something
# like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0
if (not (self.prereleases or prereleases)) and item.is_prerelease:
return False
# Determine if we're forcing a prerelease or not, we bypass
# self.prereleases here and use self._prereleases because we want to
# only take into consideration actual *forced* values. The underlying
# specifiers will handle the other logic.
# The logic here is: If prereleases is anything but None, we'll just
# go ahead and continue to use that. However if
# prereleases is None, then we'll use whatever the
# value of self._prereleases is as long as it is not
# None itself.
if prereleases is None and self._prereleases is not None:
prereleases = self._prereleases
# We simply dispatch to the underlying specs here to make sure that the
# given version is contained within all of them.
# Note: This use of all() here means that an empty set of specifiers
# will always return True, this is an explicit design decision.
return all(
s.contains(item, prereleases=prereleases)
for s in self._specs
)
def filter(self, iterable, prereleases=None):
# Determine if we're forcing a prerelease or not, we bypass
# self.prereleases here and use self._prereleases because we want to
# only take into consideration actual *forced* values. The underlying
# specifiers will handle the other logic.
# The logic here is: If prereleases is anything but None, we'll just
# go ahead and continue to use that. However if
# prereleases is None, then we'll use whatever the
# value of self._prereleases is as long as it is not
# None itself.
if prereleases is None and self._prereleases is not None:
prereleases = self._prereleases
# If we have any specifiers, then we want to wrap our iterable in the
# filter method for each one, this will act as a logical AND amongst
# each specifier.
if self._specs:
for spec in self._specs:
iterable = spec.filter(iterable, prereleases=prereleases)
return iterable
# If we do not have any specifiers, then we need to have a rough filter
# which will filter out any pre-releases, unless there are no final
# releases, and which will filter out LegacyVersion in general.
else:
filtered = []
found_prereleases = []
for item in iterable:
# Ensure that we have some kind of Version class for this item.
if not isinstance(item, (LegacyVersion, Version)):
parsed_version = parse(item)
else:
parsed_version = item
# Filter out any item which is parsed as a LegacyVersion
if isinstance(parsed_version, LegacyVersion):
continue
# Store any item which is a pre-release for later unless we've
# already found a final version or we are accepting prereleases
if parsed_version.is_prerelease and not prereleases:
if not filtered:
found_prereleases.append(item)
else:
filtered.append(item)
# If we've found no items except for pre-releases, then we'll go
# ahead and use the pre-releases
if not filtered and found_prereleases and prereleases is None:
return found_prereleases
return filtered
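
A short usage sketch of the classes above (the version strings are illustrative): ``SpecifierSet`` parses a comma-separated requirement string and applies every clause as a logical AND, excluding pre-releases unless they are explicitly allowed:

spec = SpecifierSet(">=1.0,<2.0,!=1.3")

# contains() accepts version strings (or Version objects) and checks them
# against every individual specifier.
assert spec.contains("1.4")
assert not spec.contains("1.3")   # excluded by !=1.3
assert not spec.contains("2.0")   # excluded by <2.0

# filter() keeps matching items in their original order and original form.
assert list(spec.filter(["0.9", "1.2", "1.3", "1.9", "2.1"])) == ["1.2", "1.9"]

# Pre-releases are rejected by default but can be opted into per call.
assert not spec.contains("1.5.dev1")
assert spec.contains("1.5.dev1", prereleases=True)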

View File

@@ -0,0 +1,401 @@
# Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
import collections
import itertools
import re
from ._structures import Infinity
__all__ = [
"parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"
]
_Version = collections.namedtuple(
"_Version",
["epoch", "release", "dev", "pre", "post", "local"],
)
def parse(version):
"""
Parse the given version string and return either a :class:`Version` object
or a :class:`LegacyVersion` object depending on if the given version is
a valid PEP 440 version or a legacy version.
"""
try:
return Version(version)
except InvalidVersion:
return LegacyVersion(version)
class InvalidVersion(ValueError):
"""
An invalid version was found, users should refer to PEP 440.
"""
class _BaseVersion(object):
def __hash__(self):
return hash(self._key)
def __lt__(self, other):
return self._compare(other, lambda s, o: s < o)
def __le__(self, other):
return self._compare(other, lambda s, o: s <= o)
def __eq__(self, other):
return self._compare(other, lambda s, o: s == o)
def __ge__(self, other):
return self._compare(other, lambda s, o: s >= o)
def __gt__(self, other):
return self._compare(other, lambda s, o: s > o)
def __ne__(self, other):
return self._compare(other, lambda s, o: s != o)
def _compare(self, other, method):
if not isinstance(other, _BaseVersion):
return NotImplemented
return method(self._key, other._key)
class LegacyVersion(_BaseVersion):
def __init__(self, version):
self._version = str(version)
self._key = _legacy_cmpkey(self._version)
def __str__(self):
return self._version
def __repr__(self):
return "<LegacyVersion({0})>".format(repr(str(self)))
@property
def public(self):
return self._version
@property
def base_version(self):
return self._version
@property
def local(self):
return None
@property
def is_prerelease(self):
return False
@property
def is_postrelease(self):
return False
_legacy_version_component_re = re.compile(
r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
)
_legacy_version_replacement_map = {
"pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
}
def _parse_version_parts(s):
for part in _legacy_version_component_re.split(s):
part = _legacy_version_replacement_map.get(part, part)
if not part or part == ".":
continue
if part[:1] in "0123456789":
# pad for numeric comparison
yield part.zfill(8)
else:
yield "*" + part
# ensure that alpha/beta/candidate are before final
yield "*final"
def _legacy_cmpkey(version):
# We hardcode an epoch of -1 here. A PEP 440 version can only have an epoch
# greater than or equal to 0. This will effectively put the LegacyVersion,
# which uses the de facto standard originally implemented by setuptools,
# before all PEP 440 versions.
epoch = -1
# This scheme is taken from pkg_resources.parse_version of setuptools prior
# to its adoption of the packaging library.
parts = []
for part in _parse_version_parts(version.lower()):
if part.startswith("*"):
# remove "-" before a prerelease tag
if part < "*final":
while parts and parts[-1] == "*final-":
parts.pop()
# remove trailing zeros from each series of numeric parts
while parts and parts[-1] == "00000000":
parts.pop()
parts.append(part)
parts = tuple(parts)
return epoch, parts
# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
VERSION_PATTERN = r"""
v?
(?:
(?:(?P<epoch>[0-9]+)!)? # epoch
(?P<release>[0-9]+(?:\.[0-9]+)*) # release segment
(?P<pre> # pre-release
[-_\.]?
(?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
[-_\.]?
(?P<pre_n>[0-9]+)?
)?
(?P<post> # post release
(?:-(?P<post_n1>[0-9]+))
|
(?:
[-_\.]?
(?P<post_l>post|rev|r)
[-_\.]?
(?P<post_n2>[0-9]+)?
)
)?
(?P<dev> # dev release
[-_\.]?
(?P<dev_l>dev)
[-_\.]?
(?P<dev_n>[0-9]+)?
)?
)
(?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
"""
class Version(_BaseVersion):
_regex = re.compile(
r"^\s*" + VERSION_PATTERN + r"\s*$",
re.VERBOSE | re.IGNORECASE,
)
def __init__(self, version):
# Validate the version and parse it into pieces
match = self._regex.search(version)
if not match:
raise InvalidVersion("Invalid version: '{0}'".format(version))
# Store the parsed out pieces of the version
self._version = _Version(
epoch=int(match.group("epoch")) if match.group("epoch") else 0,
release=tuple(int(i) for i in match.group("release").split(".")),
pre=_parse_letter_version(
match.group("pre_l"),
match.group("pre_n"),
),
post=_parse_letter_version(
match.group("post_l"),
match.group("post_n1") or match.group("post_n2"),
),
dev=_parse_letter_version(
match.group("dev_l"),
match.group("dev_n"),
),
local=_parse_local_version(match.group("local")),
)
# Generate a key which will be used for sorting
self._key = _cmpkey(
self._version.epoch,
self._version.release,
self._version.pre,
self._version.post,
self._version.dev,
self._version.local,
)
def __repr__(self):
return "<Version({0})>".format(repr(str(self)))
def __str__(self):
parts = []
# Epoch
if self._version.epoch != 0:
parts.append("{0}!".format(self._version.epoch))
# Release segment
parts.append(".".join(str(x) for x in self._version.release))
# Pre-release
if self._version.pre is not None:
parts.append("".join(str(x) for x in self._version.pre))
# Post-release
if self._version.post is not None:
parts.append(".post{0}".format(self._version.post[1]))
# Development release
if self._version.dev is not None:
parts.append(".dev{0}".format(self._version.dev[1]))
# Local version segment
if self._version.local is not None:
parts.append(
"+{0}".format(".".join(str(x) for x in self._version.local))
)
return "".join(parts)
@property
def public(self):
return str(self).split("+", 1)[0]
@property
def base_version(self):
parts = []
# Epoch
if self._version.epoch != 0:
parts.append("{0}!".format(self._version.epoch))
# Release segment
parts.append(".".join(str(x) for x in self._version.release))
return "".join(parts)
@property
def local(self):
version_string = str(self)
if "+" in version_string:
return version_string.split("+", 1)[1]
@property
def is_prerelease(self):
return bool(self._version.dev or self._version.pre)
@property
def is_postrelease(self):
return bool(self._version.post)
def _parse_letter_version(letter, number):
if letter:
# We consider there to be an implicit 0 in a pre-release if there is
# not a numeral associated with it.
if number is None:
number = 0
# We normalize any letters to their lower case form
letter = letter.lower()
# We consider some words to be alternate spellings of other words and
# in those cases we want to normalize the spellings to our preferred
# spelling.
if letter == "alpha":
letter = "a"
elif letter == "beta":
letter = "b"
elif letter in ["c", "pre", "preview"]:
letter = "rc"
return letter, int(number)
if not letter and number:
# We assume if we are given a number, but we are not given a letter
# then this is using the implicit post release syntax (e.g. 1.0-1)
letter = "post"
return letter, int(number)
_local_version_seperators = re.compile(r"[\._-]")
def _parse_local_version(local):
"""
Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
"""
if local is not None:
return tuple(
part.lower() if not part.isdigit() else int(part)
for part in _local_version_seperators.split(local)
)
def _cmpkey(epoch, release, pre, post, dev, local):
# When we compare a release version, we want to compare it with all of the
# trailing zeros removed. So we'll reverse the list, drop all the now-leading
# zeros until we come to something non-zero, then re-reverse the rest back
# into the correct order and make it a tuple to use as our sorting key.
release = tuple(
reversed(list(
itertools.dropwhile(
lambda x: x == 0,
reversed(release),
)
))
)
# We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
# We'll do this by abusing the pre segment, but we _only_ want to do this
# if there is not a pre or a post segment. If we have one of those then
# the normal sorting rules will handle this case correctly.
if pre is None and post is None and dev is not None:
pre = -Infinity
# Versions without a pre-release (except as noted above) should sort after
# those with one.
elif pre is None:
pre = Infinity
# Versions without a post segment should sort before those with one.
if post is None:
post = -Infinity
# Versions without a development segment should sort after those with one.
if dev is None:
dev = Infinity
if local is None:
# Versions without a local segment should sort before those with one.
local = -Infinity
else:
# Versions with a local segment need that segment parsed to implement
# the sorting rules in PEP440.
# - Alpha numeric segments sort before numeric segments
# - Alpha numeric segments sort lexicographically
# - Numeric segments sort numerically
# - Shorter versions sort before longer versions when the prefixes
# match exactly
local = tuple(
(i, "") if isinstance(i, int) else (-Infinity, i)
for i in local
)
return epoch, release, pre, post, dev, local
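
A short usage sketch of the module above (the version strings are illustrative): ``parse()`` returns a PEP 440 ``Version`` when it can and falls back to ``LegacyVersion``, and the ``_cmpkey`` machinery gives the ordering PEP 440 requires:

v = parse("1.4.post2")
assert isinstance(v, Version)
assert v.is_postrelease and not v.is_prerelease
assert str(v) == "1.4.post2"

# Development and pre-releases sort before the final release, post-releases after.
assert Version("1.0.dev0") < Version("1.0a1") < Version("1.0") < Version("1.0.post1")

# The local segment is excluded from the public and base versions.
assert Version("1.0+local.1").public == "1.0"
assert Version("1.0+local.1").base_version == "1.0"

# Anything that is not PEP 440 becomes a LegacyVersion, which sorts before
# every PEP 440 version because of its hardcoded -1 epoch.
legacy = parse("not-a-pep-440-version")
assert isinstance(legacy, LegacyVersion)
assert legacy < Version("0.1")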

View File

@@ -0,0 +1 @@
packaging==15.0

View File

@@ -0,0 +1,419 @@
Pluggable Distributions of Python Software
==========================================
Distributions
-------------
A "Distribution" is a collection of files that represent a "Release" of a
"Project" as of a particular point in time, denoted by a
"Version"::
>>> import sys, pkg_resources
>>> from pkg_resources import Distribution
>>> Distribution(project_name="Foo", version="1.2")
Foo 1.2
Distributions have a location, which can be a filename, URL, or really anything
else you care to use::
>>> dist = Distribution(
... location="http://example.com/something",
... project_name="Bar", version="0.9"
... )
>>> dist
Bar 0.9 (http://example.com/something)
Distributions have various introspectable attributes::
>>> dist.location
'http://example.com/something'
>>> dist.project_name
'Bar'
>>> dist.version
'0.9'
>>> dist.py_version == sys.version[:3]
True
>>> print(dist.platform)
None
Including various computed attributes::
>>> from pkg_resources import parse_version
>>> dist.parsed_version == parse_version(dist.version)
True
>>> dist.key # case-insensitive form of the project name
'bar'
Distributions are compared (and hashed) by version first::
>>> Distribution(version='1.0') == Distribution(version='1.0')
True
>>> Distribution(version='1.0') == Distribution(version='1.1')
False
>>> Distribution(version='1.0') < Distribution(version='1.1')
True
but also by project name (case-insensitive), platform, Python version,
location, etc.::
>>> Distribution(project_name="Foo",version="1.0") == \
... Distribution(project_name="Foo",version="1.0")
True
>>> Distribution(project_name="Foo",version="1.0") == \
... Distribution(project_name="foo",version="1.0")
True
>>> Distribution(project_name="Foo",version="1.0") == \
... Distribution(project_name="Foo",version="1.1")
False
>>> Distribution(project_name="Foo",py_version="2.3",version="1.0") == \
... Distribution(project_name="Foo",py_version="2.4",version="1.0")
False
>>> Distribution(location="spam",version="1.0") == \
... Distribution(location="spam",version="1.0")
True
>>> Distribution(location="spam",version="1.0") == \
... Distribution(location="baz",version="1.0")
False
Hash and compare distribution by prio/plat
Get version from metadata
provider capabilities
egg_name()
as_requirement()
from_location, from_filename (w/path normalization)
Releases may have zero or more "Requirements", which indicate
what releases of another project the release requires in order to
function. A Requirement names the other project, expresses some criteria
as to what releases of that project are acceptable, and lists any "Extras"
that the requiring release may need from that project. (An Extra is an
optional feature of a Release, that can only be used if its additional
Requirements are satisfied.)
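For example (an illustrative sketch), a requirement string names the project,
any Extras in square brackets, and the acceptable versions::
>>> from pkg_resources import Requirement
>>> req = Requirement.parse("Bar[quux]>=0.5,<2.0")
>>> req.project_name, req.key
('Bar', 'bar')
>>> req.extras
('quux',)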
The Working Set
---------------
A collection of active distributions is called a Working Set. Note that a
Working Set can contain any importable distribution, not just pluggable ones.
For example, the Python standard library is an importable distribution that
will usually be part of the Working Set, even though it is not pluggable.
Similarly, when you are doing development work on a project, the files you are
editing are also a Distribution. (And, with a little attention to the
directory names used, and including some additional metadata, such a
"development distribution" can be made pluggable as well.)
>>> from pkg_resources import WorkingSet
A working set's entries are the sys.path entries that correspond to the active
distributions. By default, the working set's entries are the items on
``sys.path``::
>>> ws = WorkingSet()
>>> ws.entries == sys.path
True
But you can also create an empty working set explicitly, and add distributions
to it::
>>> ws = WorkingSet([])
>>> ws.add(dist)
>>> ws.entries
['http://example.com/something']
>>> dist in ws
True
>>> Distribution('foo',version="") in ws
False
And you can iterate over its distributions::
>>> list(ws)
[Bar 0.9 (http://example.com/something)]
Adding the same distribution more than once is a no-op::
>>> ws.add(dist)
>>> list(ws)
[Bar 0.9 (http://example.com/something)]
For that matter, adding multiple distributions for the same project also does
nothing, because a working set can only hold one active distribution per
project -- the first one added to it::
>>> ws.add(
... Distribution(
... 'http://example.com/something', project_name="Bar",
... version="7.2"
... )
... )
>>> list(ws)
[Bar 0.9 (http://example.com/something)]
You can append a path entry to a working set using ``add_entry()``::
>>> ws.entries
['http://example.com/something']
>>> ws.add_entry(pkg_resources.__file__)
>>> ws.entries
['http://example.com/something', '...pkg_resources...']
Multiple additions result in multiple entries, even if the entry is already in
the working set (because ``sys.path`` can contain the same entry more than
once)::
>>> ws.add_entry(pkg_resources.__file__)
>>> ws.entries
['...example.com...', '...pkg_resources...', '...pkg_resources...']
And you can specify the path entry a distribution was found under, using the
optional second parameter to ``add()``::
>>> ws = WorkingSet([])
>>> ws.add(dist,"foo")
>>> ws.entries
['foo']
But even if a distribution is found under multiple path entries, it still only
shows up once when iterating the working set:
>>> ws.add_entry(ws.entries[0])
>>> list(ws)
[Bar 0.9 (http://example.com/something)]
You can ask a WorkingSet to ``find()`` a distribution matching a requirement::
>>> from pkg_resources import Requirement
>>> print(ws.find(Requirement.parse("Foo==1.0"))) # no match, return None
None
>>> ws.find(Requirement.parse("Bar==0.9")) # match, return distribution
Bar 0.9 (http://example.com/something)
Note that asking for a conflicting version of a distribution already in a
working set triggers a ``pkg_resources.VersionConflict`` error:
>>> try:
... ws.find(Requirement.parse("Bar==1.0"))
... except pkg_resources.VersionConflict as exc:
... print(str(exc))
... else:
... raise AssertionError("VersionConflict was not raised")
(Bar 0.9 (http://example.com/something), Requirement.parse('Bar==1.0'))
You can subscribe a callback function to receive notifications whenever a new
distribution is added to a working set. The callback is immediately invoked
once for each existing distribution in the working set, and then is called
again for new distributions added thereafter::
>>> def added(dist): print("Added %s" % dist)
>>> ws.subscribe(added)
Added Bar 0.9
>>> foo12 = Distribution(project_name="Foo", version="1.2", location="f12")
>>> ws.add(foo12)
Added Foo 1.2
Note, however, that only the first distribution added for a given project name
will trigger a callback, even during the initial ``subscribe()`` callback::
>>> foo14 = Distribution(project_name="Foo", version="1.4", location="f14")
>>> ws.add(foo14) # no callback, because Foo 1.2 is already active
>>> ws = WorkingSet([])
>>> ws.add(foo12)
>>> ws.add(foo14)
>>> ws.subscribe(added)
Added Foo 1.2
And adding a callback more than once has no effect, either::
>>> ws.subscribe(added) # no callbacks
# and no double-callbacks on subsequent additions, either
>>> just_a_test = Distribution(project_name="JustATest", version="0.99")
>>> ws.add(just_a_test)
Added JustATest 0.99
Finding Plugins
---------------
``WorkingSet`` objects can be used to figure out what plugins in an
``Environment`` can be loaded without any resolution errors::
>>> from pkg_resources import Environment
>>> plugins = Environment([]) # normally, a list of plugin directories
>>> plugins.add(foo12)
>>> plugins.add(foo14)
>>> plugins.add(just_a_test)
In the simplest case, we just get the newest version of each distribution in
the plugin environment::
>>> ws = WorkingSet([])
>>> ws.find_plugins(plugins)
([JustATest 0.99, Foo 1.4 (f14)], {})
But if there's a problem with a version conflict or missing requirements, the
method falls back to older versions, and the error info dict will contain an
exception instance for each unloadable plugin::
>>> ws.add(foo12) # this will conflict with Foo 1.4
>>> ws.find_plugins(plugins)
([JustATest 0.99, Foo 1.2 (f12)], {Foo 1.4 (f14): VersionConflict(...)})
But if you disallow fallbacks, the failed plugin will be skipped instead of
trying older versions::
>>> ws.find_plugins(plugins, fallback=False)
([JustATest 0.99], {Foo 1.4 (f14): VersionConflict(...)})
Platform Compatibility Rules
----------------------------
On the Mac, there are potential compatibility issues for modules compiled
on newer versions of Mac OS X than what the user is running. Additionally,
Mac OS X will soon have two platforms to contend with: Intel and PowerPC.
Basic equality works as on other platforms::
>>> from pkg_resources import compatible_platforms as cp
>>> reqd = 'macosx-10.4-ppc'
>>> cp(reqd, reqd)
True
>>> cp("win32", reqd)
False
Distributions made on other machine types are not compatible::
>>> cp("macosx-10.4-i386", reqd)
False
Distributions made on earlier versions of the OS are compatible, as
long as they are from the same top-level version. The patchlevel version
number does not matter::
>>> cp("macosx-10.4-ppc", reqd)
True
>>> cp("macosx-10.3-ppc", reqd)
True
>>> cp("macosx-10.5-ppc", reqd)
False
>>> cp("macosx-9.5-ppc", reqd)
False
Backwards compatibility for packages made via earlier versions of
setuptools is provided as well::
>>> cp("darwin-8.2.0-Power_Macintosh", reqd)
True
>>> cp("darwin-7.2.0-Power_Macintosh", reqd)
True
>>> cp("darwin-8.2.0-Power_Macintosh", "macosx-10.3-ppc")
False
Environment Markers
-------------------
>>> from pkg_resources import invalid_marker as im, evaluate_marker as em
>>> import os
>>> print(im("sys_platform"))
Comparison or logical expression expected
>>> print(im("sys_platform=="))
invalid syntax
>>> print(im("sys_platform=='win32'"))
False
>>> print(im("sys=='x'"))
Unknown name 'sys'
>>> print(im("(extra)"))
Comparison or logical expression expected
>>> print(im("(extra"))
invalid syntax
>>> print(im("os.open('foo')=='y'"))
Language feature not supported in environment markers
>>> print(im("'x'=='y' and os.open('foo')=='y'")) # no short-circuit!
Language feature not supported in environment markers
>>> print(im("'x'=='x' or os.open('foo')=='y'")) # no short-circuit!
Language feature not supported in environment markers
>>> print(im("'x' < 'y'"))
'<' operator not allowed in environment markers
>>> print(im("'x' < 'y' < 'z'"))
Chained comparison not allowed in environment markers
>>> print(im("r'x'=='x'"))
Only plain strings allowed in environment markers
>>> print(im("'''x'''=='x'"))
Only plain strings allowed in environment markers
>>> print(im('"""x"""=="x"'))
Only plain strings allowed in environment markers
>>> print(im(r"'x\n'=='x'"))
Only plain strings allowed in environment markers
>>> print(im("os.open=='y'"))
Language feature not supported in environment markers
>>> em('"x"=="x"')
True
>>> em('"x"=="y"')
False
>>> em('"x"=="y" and "x"=="x"')
False
>>> em('"x"=="y" or "x"=="x"')
True
>>> em('"x"=="y" and "x"=="q" or "z"=="z"')
True
>>> em('"x"=="y" and ("x"=="q" or "z"=="z")')
False
>>> em('"x"=="y" and "z"=="z" or "x"=="q"')
False
>>> em('"x"=="x" and "z"=="z" or "x"=="q"')
True
>>> em("sys_platform=='win32'") == (sys.platform=='win32')
True
>>> em("'x' in 'yx'")
True
>>> em("'yx' in 'x'")
False

View File

@@ -0,0 +1,111 @@
import sys
import tempfile
import os
import zipfile
import datetime
import time
import subprocess
import pkg_resources
try:
unicode
except NameError:
unicode = str
def timestamp(dt):
"""
Return a timestamp for a local, naive datetime instance.
"""
try:
return dt.timestamp()
except AttributeError:
# Python 3.2 and earlier
return time.mktime(dt.timetuple())
class EggRemover(unicode):
def __call__(self):
if self in sys.path:
sys.path.remove(self)
if os.path.exists(self):
os.remove(self)
class TestZipProvider(object):
finalizers = []
ref_time = datetime.datetime(2013, 5, 12, 13, 25, 0)
"A reference time for a file modification"
@classmethod
def setup_class(cls):
"create a zip egg and add it to sys.path"
egg = tempfile.NamedTemporaryFile(suffix='.egg', delete=False)
zip_egg = zipfile.ZipFile(egg, 'w')
zip_info = zipfile.ZipInfo()
zip_info.filename = 'mod.py'
zip_info.date_time = cls.ref_time.timetuple()
zip_egg.writestr(zip_info, 'x = 3\n')
zip_info = zipfile.ZipInfo()
zip_info.filename = 'data.dat'
zip_info.date_time = cls.ref_time.timetuple()
zip_egg.writestr(zip_info, 'hello, world!')
zip_egg.close()
egg.close()
sys.path.append(egg.name)
cls.finalizers.append(EggRemover(egg.name))
@classmethod
def teardown_class(cls):
for finalizer in cls.finalizers:
finalizer()
def test_resource_filename_rewrites_on_change(self):
"""
If a previous call to get_resource_filename has saved the file, but
the file has been subsequently mutated with a different file of the
same size and modification time, it should be overwritten again on a
subsequent call to get_resource_filename.
"""
import mod
manager = pkg_resources.ResourceManager()
zp = pkg_resources.ZipProvider(mod)
filename = zp.get_resource_filename(manager, 'data.dat')
actual = datetime.datetime.fromtimestamp(os.stat(filename).st_mtime)
assert actual == self.ref_time
f = open(filename, 'w')
f.write('hello, world?')
f.close()
ts = timestamp(self.ref_time)
os.utime(filename, (ts, ts))
filename = zp.get_resource_filename(manager, 'data.dat')
f = open(filename)
assert f.read() == 'hello, world!'
manager.cleanup_resources()
class TestResourceManager(object):
def test_get_cache_path(self):
mgr = pkg_resources.ResourceManager()
path = mgr.get_cache_path('foo')
type_ = str(type(path))
message = "Unexpected type from get_cache_path: " + type_
assert isinstance(path, (unicode, str)), message
class TestIndependence:
"""
Tests to ensure that pkg_resources runs independently from setuptools.
"""
def test_setuptools_not_imported(self):
"""
In a separate Python environment, import pkg_resources and assert
that action doesn't cause setuptools to be imported.
"""
lines = (
'import pkg_resources',
'import sys',
'assert "setuptools" not in sys.modules, '
'"setuptools was imported"',
)
cmd = [sys.executable, '-c', '; '.join(lines)]
subprocess.check_call(cmd)

View File

@@ -0,0 +1,661 @@
import os
import sys
import tempfile
import shutil
import string
import pytest
import pkg_resources
from pkg_resources import (parse_requirements, VersionConflict, parse_version,
Distribution, EntryPoint, Requirement, safe_version, safe_name,
WorkingSet)
packaging = pkg_resources.packaging
def safe_repr(obj, short=False):
""" copied from Python2.7"""
try:
result = repr(obj)
except Exception:
result = object.__repr__(obj)
if not short or len(result) < pkg_resources._MAX_LENGTH:
return result
return result[:pkg_resources._MAX_LENGTH] + ' [truncated]...'
class Metadata(pkg_resources.EmptyProvider):
"""Mock object to return metadata as if from an on-disk distribution"""
def __init__(self, *pairs):
self.metadata = dict(pairs)
def has_metadata(self, name):
return name in self.metadata
def get_metadata(self, name):
return self.metadata[name]
def get_metadata_lines(self, name):
return pkg_resources.yield_lines(self.get_metadata(name))
dist_from_fn = pkg_resources.Distribution.from_filename
class TestDistro:
def testCollection(self):
# empty path should produce no distributions
ad = pkg_resources.Environment([], platform=None, python=None)
assert list(ad) == []
assert ad['FooPkg'] == []
ad.add(dist_from_fn("FooPkg-1.3_1.egg"))
ad.add(dist_from_fn("FooPkg-1.4-py2.4-win32.egg"))
ad.add(dist_from_fn("FooPkg-1.2-py2.4.egg"))
# Name is in there now
assert ad['FooPkg']
# But only 1 package
assert list(ad) == ['foopkg']
# Distributions sort by version
assert [dist.version for dist in ad['FooPkg']] == ['1.4','1.3-1','1.2']
# Removing a distribution leaves sequence alone
ad.remove(ad['FooPkg'][1])
assert [dist.version for dist in ad['FooPkg']] == ['1.4','1.2']
# And inserting adds them in order
ad.add(dist_from_fn("FooPkg-1.9.egg"))
assert [dist.version for dist in ad['FooPkg']] == ['1.9','1.4','1.2']
ws = WorkingSet([])
foo12 = dist_from_fn("FooPkg-1.2-py2.4.egg")
foo14 = dist_from_fn("FooPkg-1.4-py2.4-win32.egg")
req, = parse_requirements("FooPkg>=1.3")
# Nominal case: no distros on path, should yield all applicable
assert ad.best_match(req, ws).version == '1.9'
# If a matching distro is already installed, should return only that
ws.add(foo14)
assert ad.best_match(req, ws).version == '1.4'
# If the first matching distro is unsuitable, it's a version conflict
ws = WorkingSet([])
ws.add(foo12)
ws.add(foo14)
with pytest.raises(VersionConflict):
ad.best_match(req, ws)
# If more than one match on the path, the first one takes precedence
ws = WorkingSet([])
ws.add(foo14)
ws.add(foo12)
ws.add(foo14)
assert ad.best_match(req, ws).version == '1.4'
def checkFooPkg(self,d):
assert d.project_name == "FooPkg"
assert d.key == "foopkg"
assert d.version == "1.3.post1"
assert d.py_version == "2.4"
assert d.platform == "win32"
assert d.parsed_version == parse_version("1.3-1")
def testDistroBasics(self):
d = Distribution(
"/some/path",
project_name="FooPkg",version="1.3-1",py_version="2.4",platform="win32"
)
self.checkFooPkg(d)
d = Distribution("/some/path")
assert d.py_version == sys.version[:3]
assert d.platform == None
def testDistroParse(self):
d = dist_from_fn("FooPkg-1.3.post1-py2.4-win32.egg")
self.checkFooPkg(d)
d = dist_from_fn("FooPkg-1.3.post1-py2.4-win32.egg-info")
self.checkFooPkg(d)
def testDistroMetadata(self):
d = Distribution(
"/some/path", project_name="FooPkg", py_version="2.4", platform="win32",
metadata = Metadata(
('PKG-INFO',"Metadata-Version: 1.0\nVersion: 1.3-1\n")
)
)
self.checkFooPkg(d)
def distRequires(self, txt):
return Distribution("/foo", metadata=Metadata(('depends.txt', txt)))
def checkRequires(self, dist, txt, extras=()):
assert list(dist.requires(extras)) == list(parse_requirements(txt))
def testDistroDependsSimple(self):
for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0":
self.checkRequires(self.distRequires(v), v)
def testResolve(self):
ad = pkg_resources.Environment([])
ws = WorkingSet([])
# Resolving no requirements -> nothing to install
assert list(ws.resolve([], ad)) == []
# Request something not in the collection -> DistributionNotFound
with pytest.raises(pkg_resources.DistributionNotFound):
ws.resolve(parse_requirements("Foo"), ad)
Foo = Distribution.from_filename(
"/foo_dir/Foo-1.2.egg",
metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0"))
)
ad.add(Foo)
ad.add(Distribution.from_filename("Foo-0.9.egg"))
# Request thing(s) that are available -> list to activate
for i in range(3):
targets = list(ws.resolve(parse_requirements("Foo"), ad))
assert targets == [Foo]
list(map(ws.add,targets))
with pytest.raises(VersionConflict):
ws.resolve(parse_requirements("Foo==0.9"), ad)
ws = WorkingSet([]) # reset
# Request an extra that causes an unresolved dependency for "Baz"
with pytest.raises(pkg_resources.DistributionNotFound):
ws.resolve(parse_requirements("Foo[bar]"), ad)
Baz = Distribution.from_filename(
"/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo"))
)
ad.add(Baz)
# Activation list now includes resolved dependency
assert list(ws.resolve(parse_requirements("Foo[bar]"), ad)) ==[Foo,Baz]
# Requests for conflicting versions produce VersionConflict
with pytest.raises(VersionConflict) as vc:
ws.resolve(parse_requirements("Foo==1.2\nFoo!=1.2"), ad)
msg = 'Foo 0.9 is installed but Foo==1.2 is required'
assert vc.value.report() == msg
def testDistroDependsOptions(self):
d = self.distRequires("""
Twisted>=1.5
[docgen]
ZConfig>=2.0
docutils>=0.3
[fastcgi]
fcgiapp>=0.1""")
self.checkRequires(d,"Twisted>=1.5")
self.checkRequires(
d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"]
)
self.checkRequires(
d,"Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"]
)
self.checkRequires(
d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(),
["docgen","fastcgi"]
)
self.checkRequires(
d,"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(),
["fastcgi", "docgen"]
)
with pytest.raises(pkg_resources.UnknownExtra):
d.requires(["foo"])
class TestWorkingSet:
def test_find_conflicting(self):
ws = WorkingSet([])
Foo = Distribution.from_filename("/foo_dir/Foo-1.2.egg")
ws.add(Foo)
# create a requirement that conflicts with Foo 1.2
req = next(parse_requirements("Foo<1.2"))
with pytest.raises(VersionConflict) as vc:
ws.find(req)
msg = 'Foo 1.2 is installed but Foo<1.2 is required'
assert vc.value.report() == msg
def test_resolve_conflicts_with_prior(self):
"""
A ContextualVersionConflict should be raised when a requirement
conflicts with a prior requirement for a different package.
"""
# Create installation where Foo depends on Baz 1.0 and Bar depends on
# Baz 2.0.
ws = WorkingSet([])
md = Metadata(('depends.txt', "Baz==1.0"))
Foo = Distribution.from_filename("/foo_dir/Foo-1.0.egg", metadata=md)
ws.add(Foo)
md = Metadata(('depends.txt', "Baz==2.0"))
Bar = Distribution.from_filename("/foo_dir/Bar-1.0.egg", metadata=md)
ws.add(Bar)
Baz = Distribution.from_filename("/foo_dir/Baz-1.0.egg")
ws.add(Baz)
Baz = Distribution.from_filename("/foo_dir/Baz-2.0.egg")
ws.add(Baz)
with pytest.raises(VersionConflict) as vc:
ws.resolve(parse_requirements("Foo\nBar\n"))
msg = "Baz 1.0 is installed but Baz==2.0 is required by {'Bar'}"
if pkg_resources.PY2:
msg = msg.replace("{'Bar'}", "set(['Bar'])")
assert vc.value.report() == msg
class TestEntryPoints:
def assertfields(self, ep):
assert ep.name == "foo"
assert ep.module_name == "pkg_resources.tests.test_resources"
assert ep.attrs == ("TestEntryPoints",)
assert ep.extras == ("x",)
assert ep.load() is TestEntryPoints
expect = "foo = pkg_resources.tests.test_resources:TestEntryPoints [x]"
assert str(ep) == expect
def setup_method(self, method):
self.dist = Distribution.from_filename(
"FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt','[x]')))
def testBasics(self):
ep = EntryPoint(
"foo", "pkg_resources.tests.test_resources", ["TestEntryPoints"],
["x"], self.dist
)
self.assertfields(ep)
def testParse(self):
s = "foo = pkg_resources.tests.test_resources:TestEntryPoints [x]"
ep = EntryPoint.parse(s, self.dist)
self.assertfields(ep)
ep = EntryPoint.parse("bar baz= spammity[PING]")
assert ep.name == "bar baz"
assert ep.module_name == "spammity"
assert ep.attrs == ()
assert ep.extras == ("ping",)
ep = EntryPoint.parse(" fizzly = wocka:foo")
assert ep.name == "fizzly"
assert ep.module_name == "wocka"
assert ep.attrs == ("foo",)
assert ep.extras == ()
# plus in the name
spec = "html+mako = mako.ext.pygmentplugin:MakoHtmlLexer"
ep = EntryPoint.parse(spec)
assert ep.name == 'html+mako'
reject_specs = "foo", "x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2"
@pytest.mark.parametrize("reject_spec", reject_specs)
def test_reject_spec(self, reject_spec):
with pytest.raises(ValueError):
EntryPoint.parse(reject_spec)
def test_printable_name(self):
"""
Allow any printable character in the name.
"""
# Create a name with all printable characters; strip the whitespace.
name = string.printable.strip()
spec = "{name} = module:attr".format(**locals())
ep = EntryPoint.parse(spec)
assert ep.name == name
def checkSubMap(self, m):
assert len(m) == len(self.submap_expect)
for key, ep in pkg_resources.iteritems(self.submap_expect):
assert repr(m.get(key)) == repr(ep)
submap_expect = dict(
feature1=EntryPoint('feature1', 'somemodule', ['somefunction']),
feature2=EntryPoint('feature2', 'another.module', ['SomeClass'], ['extra1','extra2']),
feature3=EntryPoint('feature3', 'this.module', extras=['something'])
)
submap_str = """
# define features for blah blah
feature1 = somemodule:somefunction
feature2 = another.module:SomeClass [extra1,extra2]
feature3 = this.module [something]
"""
def testParseList(self):
self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str))
with pytest.raises(ValueError):
EntryPoint.parse_group("x a", "foo=bar")
with pytest.raises(ValueError):
EntryPoint.parse_group("x", ["foo=baz", "foo=bar"])
def testParseMap(self):
m = EntryPoint.parse_map({'xyz':self.submap_str})
self.checkSubMap(m['xyz'])
assert list(m.keys()) == ['xyz']
m = EntryPoint.parse_map("[xyz]\n"+self.submap_str)
self.checkSubMap(m['xyz'])
assert list(m.keys()) == ['xyz']
with pytest.raises(ValueError):
EntryPoint.parse_map(["[xyz]", "[xyz]"])
with pytest.raises(ValueError):
EntryPoint.parse_map(self.submap_str)
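# Illustrative sketch (not part of the original diff): parsing a made-up
# entry-point group the same way testParseList/testParseMap do above; the
# group and script names are hypothetical.
_example_group = """
cmd = examplepkg.cli:main
cmd-extra = examplepkg.cli:main [extra1]
"""
_eps = EntryPoint.parse_group("console_scripts", _example_group)
assert sorted(_eps) == ["cmd", "cmd-extra"]
assert _eps["cmd"].attrs == ("main",)
assert _eps["cmd-extra"].extras == ("extra1",)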
class TestRequirements:
def testBasics(self):
r = Requirement.parse("Twisted>=1.2")
assert str(r) == "Twisted>=1.2"
assert repr(r) == "Requirement.parse('Twisted>=1.2')"
assert r == Requirement("Twisted", [('>=','1.2')], ())
assert r == Requirement("twisTed", [('>=','1.2')], ())
assert r != Requirement("Twisted", [('>=','2.0')], ())
assert r != Requirement("Zope", [('>=','1.2')], ())
assert r != Requirement("Zope", [('>=','3.0')], ())
assert r != Requirement.parse("Twisted[extras]>=1.2")
def testOrdering(self):
r1 = Requirement("Twisted", [('==','1.2c1'),('>=','1.2')], ())
r2 = Requirement("Twisted", [('>=','1.2'),('==','1.2c1')], ())
assert r1 == r2
assert str(r1) == str(r2)
assert str(r2) == "Twisted==1.2c1,>=1.2"
def testBasicContains(self):
r = Requirement("Twisted", [('>=','1.2')], ())
foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg")
twist11 = Distribution.from_filename("Twisted-1.1.egg")
twist12 = Distribution.from_filename("Twisted-1.2.egg")
assert parse_version('1.2') in r
assert parse_version('1.1') not in r
assert '1.2' in r
assert '1.1' not in r
assert foo_dist not in r
assert twist11 not in r
assert twist12 in r
def testOptionsAndHashing(self):
r1 = Requirement.parse("Twisted[foo,bar]>=1.2")
r2 = Requirement.parse("Twisted[bar,FOO]>=1.2")
assert r1 == r2
assert r1.extras == ("foo","bar")
assert r2.extras == ("bar","foo") # extras are normalized
assert hash(r1) == hash(r2)
assert (
hash(r1)
==
hash((
"twisted",
packaging.specifiers.SpecifierSet(">=1.2"),
frozenset(["foo","bar"]),
))
)
def testVersionEquality(self):
r1 = Requirement.parse("foo==0.3a2")
r2 = Requirement.parse("foo!=0.3a4")
d = Distribution.from_filename
assert d("foo-0.3a4.egg") not in r1
assert d("foo-0.3a1.egg") not in r1
assert d("foo-0.3a4.egg") not in r2
assert d("foo-0.3a2.egg") in r1
assert d("foo-0.3a2.egg") in r2
assert d("foo-0.3a3.egg") in r2
assert d("foo-0.3a5.egg") in r2
def testSetuptoolsProjectName(self):
"""
The setuptools project should implement the setuptools package.
"""
assert (
Requirement.parse('setuptools').project_name == 'setuptools')
# setuptools 0.7 and higher means setuptools.
assert (
Requirement.parse('setuptools == 0.7').project_name == 'setuptools')
assert (
Requirement.parse('setuptools == 0.7a1').project_name == 'setuptools')
assert (
Requirement.parse('setuptools >= 0.7').project_name == 'setuptools')
class TestParsing:
def testEmptyParse(self):
assert list(parse_requirements('')) == []
def testYielding(self):
for inp,out in [
([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']),
(['x\n\n','y'], ['x','y']),
]:
assert list(pkg_resources.yield_lines(inp)) == out
def testSplitting(self):
sample = """
x
[Y]
z
a
[b ]
# foo
c
[ d]
[q]
v
"""
assert (
list(pkg_resources.split_sections(sample))
==
[
(None, ["x"]),
("Y", ["z", "a"]),
("b", ["c"]),
("d", []),
("q", ["v"]),
]
)
with pytest.raises(ValueError):
list(pkg_resources.split_sections("[foo"))
def testSafeName(self):
assert safe_name("adns-python") == "adns-python"
assert safe_name("WSGI Utils") == "WSGI-Utils"
assert safe_name("WSGI Utils") == "WSGI-Utils"
assert safe_name("Money$$$Maker") == "Money-Maker"
assert safe_name("peak.web") != "peak-web"
def testSafeVersion(self):
assert safe_version("1.2-1") == "1.2.post1"
assert safe_version("1.2 alpha") == "1.2.alpha"
assert safe_version("2.3.4 20050521") == "2.3.4.20050521"
assert safe_version("Money$$$Maker") == "Money-Maker"
assert safe_version("peak.web") == "peak.web"
def testSimpleRequirements(self):
assert (
list(parse_requirements('Twis-Ted>=1.2-1'))
==
[Requirement('Twis-Ted',[('>=','1.2-1')], ())]
)
assert (
list(parse_requirements('Twisted >=1.2, \ # more\n<2.0'))
==
[Requirement('Twisted',[('>=','1.2'),('<','2.0')], ())]
)
assert (
Requirement.parse("FooBar==1.99a3")
==
Requirement("FooBar", [('==','1.99a3')], ())
)
with pytest.raises(ValueError):
Requirement.parse(">=2.3")
with pytest.raises(ValueError):
Requirement.parse("x\\")
with pytest.raises(ValueError):
Requirement.parse("x==2 q")
with pytest.raises(ValueError):
Requirement.parse("X==1\nY==2")
with pytest.raises(ValueError):
Requirement.parse("#")
def testVersionEquality(self):
def c(s1,s2):
p1, p2 = parse_version(s1),parse_version(s2)
assert p1 == p2, (s1,s2,p1,p2)
c('1.2-rc1', '1.2rc1')
c('0.4', '0.4.0')
c('0.4.0.0', '0.4.0')
c('0.4.0-0', '0.4-0')
c('0post1', '0.0post1')
c('0pre1', '0.0c1')
c('0.0.0preview1', '0c1')
c('0.0c1', '0-rc1')
c('1.2a1', '1.2.a.1')
c('1.2.a', '1.2a')
def testVersionOrdering(self):
def c(s1,s2):
p1, p2 = parse_version(s1),parse_version(s2)
assert p1<p2, (s1,s2,p1,p2)
c('2.1','2.1.1')
c('2a1','2b0')
c('2a1','2.1')
c('2.3a1', '2.3')
c('2.1-1', '2.1-2')
c('2.1-1', '2.1.1')
c('2.1', '2.1post4')
c('2.1a0-20040501', '2.1')
c('1.1', '02.1')
c('3.2', '3.2.post0')
c('3.2post1', '3.2post2')
c('0.4', '4.0')
c('0.0.4', '0.4.0')
c('0post1', '0.4post1')
c('2.1.0-rc1','2.1.0')
c('2.1dev','2.1a0')
torture ="""
0.80.1-3 0.80.1-2 0.80.1-1 0.79.9999+0.80.0pre4-1
0.79.9999+0.80.0pre2-3 0.79.9999+0.80.0pre2-2
0.77.2-1 0.77.1-1 0.77.0-1
""".split()
for p,v1 in enumerate(torture):
for v2 in torture[p+1:]:
c(v2,v1)
def testVersionBuildout(self):
"""
Buildout has a function in its bootstrap.py that inspects the return
value of parse_version. The new parse_version returns a Version class
which needs to support this behavior, at least for now.
"""
def buildout(parsed_version):
_final_parts = '*final-', '*final'
def _final_version(parsed_version):
for part in parsed_version:
if (part[:1] == '*') and (part not in _final_parts):
return False
return True
return _final_version(parsed_version)
assert buildout(parse_version("1.0"))
assert not buildout(parse_version("1.0a1"))
def testVersionIndexable(self):
"""
Some projects were doing things like parse_version("v")[0], so we'll
support indexing the same as we support iterating.
"""
assert parse_version("1.0")[0] == "00000001"
def testVersionTupleSort(self):
"""
Some projects expected to be able to sort tuples against the return
value of parse_version. So again we'll add a warning-enabled shim to
make this possible.
"""
assert parse_version("1.0") < tuple(parse_version("2.0"))
assert parse_version("1.0") <= tuple(parse_version("2.0"))
assert parse_version("1.0") == tuple(parse_version("1.0"))
assert parse_version("3.0") > tuple(parse_version("2.0"))
assert parse_version("3.0") >= tuple(parse_version("2.0"))
assert parse_version("3.0") != tuple(parse_version("2.0"))
assert not (parse_version("3.0") != tuple(parse_version("3.0")))
def testVersionHashable(self):
"""
Ensure that our versions stay hashable even though we've subclassed
them and added some shim code to them.
"""
assert (
hash(parse_version("1.0"))
==
hash(parse_version("1.0"))
)
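# Illustrative sketch (not part of the original diff): the ordering and
# equality rules the tests above rely on, stated directly as
# parse_version comparisons.
assert parse_version("1.0.dev1") < parse_version("1.0a1") < parse_version("1.0")
assert parse_version("1.0") < parse_version("1.0.post1")
assert parse_version("1.0") == parse_version("1.0.0")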
class TestNamespaces:
def setup_method(self, method):
self._ns_pkgs = pkg_resources._namespace_packages.copy()
self._tmpdir = tempfile.mkdtemp(prefix="tests-setuptools-")
os.makedirs(os.path.join(self._tmpdir, "site-pkgs"))
self._prev_sys_path = sys.path[:]
sys.path.append(os.path.join(self._tmpdir, "site-pkgs"))
def teardown_method(self, method):
shutil.rmtree(self._tmpdir)
pkg_resources._namespace_packages = self._ns_pkgs.copy()
sys.path = self._prev_sys_path[:]
@pytest.mark.skipif(os.path.islink(tempfile.gettempdir()),
reason="Test fails when /tmp is a symlink. See #231")
def test_two_levels_deep(self):
"""
Test nested namespace packages
Create namespace packages in the following tree:
site-packages-1/pkg1/pkg2
site-packages-2/pkg1/pkg2
Check both are in the _namespace_packages dict and that their __path__
is correct
"""
sys.path.append(os.path.join(self._tmpdir, "site-pkgs2"))
os.makedirs(os.path.join(self._tmpdir, "site-pkgs", "pkg1", "pkg2"))
os.makedirs(os.path.join(self._tmpdir, "site-pkgs2", "pkg1", "pkg2"))
ns_str = "__import__('pkg_resources').declare_namespace(__name__)\n"
for site in ["site-pkgs", "site-pkgs2"]:
pkg1_init = open(os.path.join(self._tmpdir, site,
"pkg1", "__init__.py"), "w")
pkg1_init.write(ns_str)
pkg1_init.close()
pkg2_init = open(os.path.join(self._tmpdir, site,
"pkg1", "pkg2", "__init__.py"), "w")
pkg2_init.write(ns_str)
pkg2_init.close()
import pkg1
assert "pkg1" in pkg_resources._namespace_packages
# attempt to import pkg2 from site-pkgs2
import pkg1.pkg2
# check the _namespace_packages dict
assert "pkg1.pkg2" in pkg_resources._namespace_packages
assert pkg_resources._namespace_packages["pkg1"] == ["pkg1.pkg2"]
# check the __path__ attribute contains both paths
expected = [
os.path.join(self._tmpdir, "site-pkgs", "pkg1", "pkg2"),
os.path.join(self._tmpdir, "site-pkgs2", "pkg1", "pkg2"),
]
assert pkg1.pkg2.__path__ == expected
View File
@ -1,16 +1,17 @@
"""Extensions to the 'distutils' for large or complex distributions"""
import os
import sys
import distutils.core
import distutils.filelist
from distutils.core import Command as _Command
from distutils.util import convert_path
from fnmatch import fnmatchcase
import setuptools.version
from setuptools.extension import Extension
from setuptools.dist import Distribution, Feature, _get_unpatched
from setuptools.depends import Require
from setuptools.compat import filterfalse
__all__ = [
'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require',
@ -27,33 +28,87 @@ run_2to3_on_doctests = True
# Standard package names for fixer packages
lib2to3_fixer_packages = ['lib2to3.fixes']
def find_packages(where='.', exclude=()):
"""Return a list all Python packages found within directory 'where'
'where' should be supplied as a "cross-platform" (i.e. URL-style) path; it
will be converted to the appropriate local path syntax. 'exclude' is a
sequence of package names to exclude; '*' can be used as a wildcard in the
names, such that 'foo.*' will exclude all subpackages of 'foo' (but not
'foo' itself).
"""
out = []
stack=[(convert_path(where), '')]
while stack:
where,prefix = stack.pop(0)
for name in os.listdir(where):
fn = os.path.join(where,name)
looks_like_package = (
'.' not in name
and os.path.isdir(fn)
and os.path.isfile(os.path.join(fn, '__init__.py'))
)
if looks_like_package:
out.append(prefix+name)
stack.append((fn, prefix+name+'.'))
for pat in list(exclude)+['ez_setup']:
from fnmatch import fnmatchcase
out = [item for item in out if not fnmatchcase(item,pat)]
return out
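# Illustrative sketch (not part of the original diff): find_packages()
# applied to a scratch tree; the package names are hypothetical and the
# temporary directory is created only for demonstration.
import tempfile
_demo_root = tempfile.mkdtemp()
for _pkg in ('examplepkg', 'examplepkg/core', 'tests'):
    _pkg_dir = os.path.join(_demo_root, *_pkg.split('/'))
    os.makedirs(_pkg_dir)
    open(os.path.join(_pkg_dir, '__init__.py'), 'w').close()
# 'tests' is dropped by the exclude pattern; the rest are reported as
# dotted package names.
assert sorted(find_packages(_demo_root, exclude=('tests',))) == [
    'examplepkg', 'examplepkg.core']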
class PackageFinder(object):
@classmethod
def find(cls, where='.', exclude=(), include=('*',)):
"""Return a list all Python packages found within directory 'where'
'where' should be supplied as a "cross-platform" (i.e. URL-style)
path; it will be converted to the appropriate local path syntax.
'exclude' is a sequence of package names to exclude; '*' can be used
as a wildcard in the names, such that 'foo.*' will exclude all
subpackages of 'foo' (but not 'foo' itself).
'include' is a sequence of package names to include. If it's
specified, only the named packages will be included. If it's not
specified, all found packages will be included. 'include' can contain
shell style wildcard patterns just like 'exclude'.
The list of included packages is built up first and then any
explicitly excluded packages are removed from it.
"""
out = cls._find_packages_iter(convert_path(where))
out = cls.require_parents(out)
includes = cls._build_filter(*include)
excludes = cls._build_filter('ez_setup', '*__pycache__', *exclude)
out = filter(includes, out)
out = filterfalse(excludes, out)
return list(out)
@staticmethod
def require_parents(packages):
"""
Exclude any apparent package that doesn't include its parent.
For example, exclude 'foo.bar' if 'foo' is not present.
"""
found = []
for pkg in packages:
base, sep, child = pkg.rpartition('.')
if base and base not in found:
continue
found.append(pkg)
yield pkg
@staticmethod
def _all_dirs(base_path):
"""
Return all dirs in base_path, relative to base_path
"""
for root, dirs, files in os.walk(base_path, followlinks=True):
for dir in dirs:
yield os.path.relpath(os.path.join(root, dir), base_path)
@classmethod
def _find_packages_iter(cls, base_path):
dirs = cls._all_dirs(base_path)
suitable = filterfalse(lambda n: '.' in n, dirs)
return (
path.replace(os.path.sep, '.')
for path in suitable
if cls._looks_like_package(os.path.join(base_path, path))
)
@staticmethod
def _looks_like_package(path):
return os.path.isfile(os.path.join(path, '__init__.py'))
@staticmethod
def _build_filter(*patterns):
"""
Given a list of patterns, return a callable that will be true only if
the input matches one of the patterns.
"""
return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns)
class PEP420PackageFinder(PackageFinder):
@staticmethod
def _looks_like_package(path):
return True
find_packages = PackageFinder.find
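# Illustrative sketch (not part of the original diff): how the include and
# exclude patterns compose, shown with the standalone _build_filter helper;
# the package names are hypothetical.
_include = PackageFinder._build_filter('examplepkg', 'examplepkg.*')
_exclude = PackageFinder._build_filter('*.tests')
_names = ['examplepkg', 'examplepkg.core', 'examplepkg.tests', 'otherpkg']
# The include list is built first, then anything matching an exclude
# pattern is removed.
assert [n for n in _names if _include(n) and not _exclude(n)] == [
    'examplepkg', 'examplepkg.core']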
setup = distutils.core.setup
@ -83,7 +138,7 @@ def findall(dir = os.curdir):
(relative to 'dir').
"""
all_files = []
for base, dirs, files in os.walk(dir):
for base, dirs, files in os.walk(dir, followlinks=True):
if base==os.curdir or base.startswith(os.curdir+os.sep):
base = base[2:]
if base:
@ -92,7 +147,3 @@ def findall(dir = os.curdir):
return all_files
distutils.filelist.findall = findall # fix findall bug in distutils.
# sys.dont_write_bytecode was introduced in Python 2.6.
_dont_write_bytecode = getattr(sys, 'dont_write_bytecode',
bool(os.environ.get("PYTHONDONTWRITEBYTECODE")))
View File
@ -6,42 +6,25 @@ __all__ = [
"UnrecognizedFormat", "extraction_drivers", "unpack_directory",
]
import zipfile, tarfile, os, shutil, posixpath
from pkg_resources import ensure_directory
import zipfile
import tarfile
import os
import shutil
import posixpath
import contextlib
from pkg_resources import ensure_directory, ContextualZipFile
from distutils.errors import DistutilsError
class UnrecognizedFormat(DistutilsError):
"""Couldn't recognize the archive type"""
def default_filter(src,dst):
"""The default progress/filter callback; returns True for all files"""
"""The default progress/filter callback; returns True for all files"""
return dst
def unpack_archive(filename, extract_dir, progress_filter=default_filter,
drivers=None
):
drivers=None):
"""Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``
`progress_filter` is a function taking two arguments: a source path
@ -75,52 +58,33 @@ def unpack_archive(filename, extract_dir, progress_filter=default_filter,
)
def unpack_directory(filename, extract_dir, progress_filter=default_filter):
""""Unpack" a directory, using the same interface as for archives
Raises ``UnrecognizedFormat`` if `filename` is not a directory
"""
if not os.path.isdir(filename):
raise UnrecognizedFormat("%s is not a directory" % (filename,))
raise UnrecognizedFormat("%s is not a directory" % filename)
paths = {filename:('',extract_dir)}
paths = {
filename: ('', extract_dir),
}
for base, dirs, files in os.walk(filename):
src,dst = paths[base]
src, dst = paths[base]
for d in dirs:
paths[os.path.join(base,d)] = src+d+'/', os.path.join(dst,d)
paths[os.path.join(base, d)] = src + d + '/', os.path.join(dst, d)
for f in files:
name = src+f
target = os.path.join(dst,f)
target = progress_filter(src+f, target)
target = os.path.join(dst, f)
target = progress_filter(src + f, target)
if not target:
continue # skip non-files
# skip non-files
continue
ensure_directory(target)
f = os.path.join(base,f)
f = os.path.join(base, f)
shutil.copyfile(f, target)
shutil.copystat(f, target)
def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
"""Unpack zip `filename` to `extract_dir`
@ -132,8 +96,7 @@ def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
if not zipfile.is_zipfile(filename):
raise UnrecognizedFormat("%s is not a zip file" % (filename,))
z = zipfile.ZipFile(filename)
try:
with ContextualZipFile(filename) as z:
for info in z.infolist():
name = info.filename
@ -152,17 +115,11 @@ def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
# file
ensure_directory(target)
data = z.read(info.filename)
f = open(target,'wb')
try:
with open(target, 'wb') as f:
f.write(data)
finally:
f.close()
del data
unix_attributes = info.external_attr >> 16
if unix_attributes:
os.chmod(target, unix_attributes)
finally:
z.close()
def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
@ -178,19 +135,22 @@ def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
raise UnrecognizedFormat(
"%s is not a compressed or uncompressed tar file" % (filename,)
)
try:
tarobj.chown = lambda *args: None # don't do any chowning!
with contextlib.closing(tarobj):
# don't do any chowning!
tarobj.chown = lambda *args: None
for member in tarobj:
name = member.name
# don't extract absolute paths or ones with .. in them
if not name.startswith('/') and '..' not in name.split('/'):
prelim_dst = os.path.join(extract_dir, *name.split('/'))
# resolve any links and extract the link targets as normal files
# resolve any links and extract the link targets as normal
# files
while member is not None and (member.islnk() or member.issym()):
linkpath = member.linkname
if member.issym():
linkpath = posixpath.join(posixpath.dirname(member.name), linkpath)
base = posixpath.dirname(member.name)
linkpath = posixpath.join(base, linkpath)
linkpath = posixpath.normpath(linkpath)
member = tarobj._getmember(linkpath)
@ -200,11 +160,11 @@ def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
if final_dst.endswith(os.sep):
final_dst = final_dst[:-1]
try:
tarobj._extract_member(member, final_dst) # XXX Ugh
# XXX Ugh
tarobj._extract_member(member, final_dst)
except tarfile.ExtractError:
pass # chown/chmod/mkfifo/mknode/makedev failed
# chown/chmod/mkfifo/mknode/makedev failed
pass
return True
finally:
tarobj.close()
extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
View File
@ -5,10 +5,11 @@ __all__ = [
'register', 'bdist_wininst', 'upload_docs',
]
from setuptools.command import install_scripts
from distutils.command.bdist import bdist
import sys
from distutils.command.bdist import bdist
from setuptools.command import install_scripts
if 'egg' not in bdist.format_commands:
bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
View File
@ -1,27 +1,26 @@
import distutils, os
from setuptools import Command
from distutils.util import convert_path
from distutils import log
from distutils.errors import *
from distutils.errors import DistutilsOptionError
from setuptools.command.setopt import edit_config, option_base, config_file
def shquote(arg):
"""Quote an argument for later parsing by shlex.split()"""
for c in '"', "'", "\\", "#":
if c in arg: return repr(arg)
if c in arg:
return repr(arg)
if arg.split() != [arg]:
return repr(arg)
return arg
return arg
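# Illustrative sketch (not part of the original diff): shquote() output
# survives a later shlex.split(); the sample arguments are hypothetical.
import shlex
_args = ['sdist', '--formats=gztar zip']
_command = ' '.join(shquote(arg) for arg in _args)
# The argument containing a space is repr()-quoted and round-trips intact.
assert shlex.split(_command) == _args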
class alias(option_base):
"""Define a shortcut that invokes one or more commands"""
description = "define a shortcut to invoke one or more commands"
command_consumes_arguments = True
user_options = [
('remove', 'r', 'remove (unset) the alias'),
('remove', 'r', 'remove (unset) the alias'),
] + option_base.user_options
boolean_options = option_base.boolean_options + ['remove']
@ -49,7 +48,7 @@ class alias(option_base):
print("setup.py alias", format_alias(alias, aliases))
return
elif len(self.args)==1:
elif len(self.args) == 1:
alias, = self.args
if self.remove:
command = None
@ -61,9 +60,9 @@ class alias(option_base):
return
else:
alias = self.args[0]
command = ' '.join(map(shquote,self.args[1:]))
command = ' '.join(map(shquote, self.args[1:]))
edit_config(self.filename, {'aliases': {alias:command}}, self.dry_run)
edit_config(self.filename, {'aliases': {alias: command}}, self.dry_run)
def format_alias(name, aliases):
@ -76,7 +75,4 @@ def format_alias(name, aliases):
source = ''
else:
source = '--filename=%r' % source
return source+name+' '+command
return source + name + ' ' + command
View File
@ -3,26 +3,33 @@
Build .egg distributions"""
# This module should be kept compatible with Python 2.3
import sys, os, marshal
from setuptools import Command
from distutils.errors import DistutilsSetupError
from distutils.dir_util import remove_tree, mkpath
from distutils import log
from types import CodeType
import sys
import os
import marshal
import textwrap
from pkg_resources import get_build_platform, Distribution, ensure_directory
from pkg_resources import EntryPoint
from setuptools.compat import basestring
from setuptools.extension import Library
from setuptools import Command
try:
# Python 2.7 or >=3.2
from sysconfig import get_path, get_python_version
def _get_purelib():
return get_path("purelib")
except ImportError:
from distutils.sysconfig import get_python_lib, get_python_version
def _get_purelib():
return get_python_lib(False)
from distutils import log
from distutils.errors import DistutilsSetupError
from pkg_resources import get_build_platform, Distribution, ensure_directory
from pkg_resources import EntryPoint
from types import CodeType
from setuptools.compat import basestring, next
from setuptools.extension import Library
def strip_module(filename):
if '.' in filename:
@ -31,66 +38,45 @@ def strip_module(filename):
filename = filename[:-6]
return filename
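# Illustrative sketch (not part of the original diff): strip_module()
# reduces a built extension's filename to its stub module name; the
# filenames below are hypothetical and this assumes the usual
# "...module" suffix handling elided by the hunk above.
assert strip_module('examplemodule.so') == 'example'
assert strip_module('example.so') == 'example'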
def write_stub(resource, pyfile):
f = open(pyfile,'w')
f.write('\n'.join([
"def __bootstrap__():",
" global __bootstrap__, __loader__, __file__",
" import sys, pkg_resources, imp",
" __file__ = pkg_resources.resource_filename(__name__,%r)"
% resource,
" __loader__ = None; del __bootstrap__, __loader__",
" imp.load_dynamic(__name__,__file__)",
"__bootstrap__()",
"" # terminal \n
]))
f.close()
# stub __init__.py for packages distributed without one
NS_PKG_STUB = '__import__("pkg_resources").declare_namespace(__name__)'
def write_stub(resource, pyfile):
_stub_template = textwrap.dedent("""
def __bootstrap__():
global __bootstrap__, __loader__, __file__
import sys, pkg_resources, imp
__file__ = pkg_resources.resource_filename(__name__, %r)
__loader__ = None; del __bootstrap__, __loader__
imp.load_dynamic(__name__,__file__)
__bootstrap__()
""").lstrip()
with open(pyfile, 'w') as f:
f.write(_stub_template % resource)
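# Illustrative sketch (not part of the original diff): writing a stub
# loader for a hypothetical extension resource into a temporary file.
import tempfile
_example_stub = os.path.join(tempfile.mkdtemp(), '_example_ext.py')
write_stub('_example_ext.so', _example_stub)
# The generated stub resolves the shared object through
# pkg_resources.resource_filename() and loads it with imp.load_dynamic().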
class bdist_egg(Command):
description = "create an \"egg\" distribution"
user_options = [
('bdist-dir=', 'b',
"temporary directory for creating the distribution"),
('plat-name=', 'p',
"platform name to embed in generated filenames "
"(default: %s)" % get_build_platform()),
"temporary directory for creating the distribution"),
('plat-name=', 'p', "platform name to embed in generated filenames "
"(default: %s)" % get_build_platform()),
('exclude-source-files', None,
"remove all .py files from the generated egg"),
"remove all .py files from the generated egg"),
('keep-temp', 'k',
"keep the pseudo-installation tree around after " +
"creating the distribution archive"),
"keep the pseudo-installation tree around after " +
"creating the distribution archive"),
('dist-dir=', 'd',
"directory to put final built distributions in"),
"directory to put final built distributions in"),
('skip-build', None,
"skip rebuilding everything (for testing/debugging)"),
"skip rebuilding everything (for testing/debugging)"),
]
boolean_options = [
'keep-temp', 'skip-build', 'exclude-source-files'
]
def initialize_options (self):
def initialize_options(self):
self.bdist_dir = None
self.plat_name = None
self.keep_temp = 0
@ -99,7 +85,6 @@ class bdist_egg(Command):
self.egg_output = None
self.exclude_source_files = None
def finalize_options(self):
ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
self.egg_info = ei_cmd.egg_info
@ -111,7 +96,7 @@ class bdist_egg(Command):
if self.plat_name is None:
self.plat_name = get_build_platform()
self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
if self.egg_output is None:
@ -122,32 +107,25 @@ class bdist_egg(Command):
self.distribution.has_ext_modules() and self.plat_name
).egg_name()
self.egg_output = os.path.join(self.dist_dir, basename+'.egg')
self.egg_output = os.path.join(self.dist_dir, basename + '.egg')
def do_install_data(self):
# Hack for packages that install data to install's --install-lib
self.get_finalized_command('install').install_lib = self.bdist_dir
site_packages = os.path.normcase(os.path.realpath(_get_purelib()))
old, self.distribution.data_files = self.distribution.data_files,[]
old, self.distribution.data_files = self.distribution.data_files, []
for item in old:
if isinstance(item,tuple) and len(item)==2:
if isinstance(item, tuple) and len(item) == 2:
if os.path.isabs(item[0]):
realpath = os.path.realpath(item[0])
normalized = os.path.normcase(realpath)
if normalized==site_packages or normalized.startswith(
site_packages+os.sep
if normalized == site_packages or normalized.startswith(
site_packages + os.sep
):
item = realpath[len(site_packages)+1:], item[1]
# XXX else: raise ???
item = realpath[len(site_packages) + 1:], item[1]
# XXX else: raise ???
self.distribution.data_files.append(item)
try:
@ -156,22 +134,19 @@ class bdist_egg(Command):
finally:
self.distribution.data_files = old
def get_outputs(self):
return [self.egg_output]
def call_command(self,cmdname,**kw):
def call_command(self, cmdname, **kw):
"""Invoke reinitialized command `cmdname` with keyword args"""
for dirname in INSTALL_DIRECTORY_ATTRS:
kw.setdefault(dirname,self.bdist_dir)
kw.setdefault('skip_build',self.skip_build)
kw.setdefault(dirname, self.bdist_dir)
kw.setdefault('skip_build', self.skip_build)
kw.setdefault('dry_run', self.dry_run)
cmd = self.reinitialize_command(cmdname, **kw)
self.run_command(cmdname)
return cmd
def run(self):
# Generate metadata first
self.run_command("egg_info")
@ -179,7 +154,8 @@ class bdist_egg(Command):
# pull their data path from the install_lib command.
log.info("installing library code to %s" % self.bdist_dir)
instcmd = self.get_finalized_command('install')
old_root = instcmd.root; instcmd.root = None
old_root = instcmd.root
instcmd.root = None
if self.distribution.has_c_libraries() and not self.skip_build:
self.run_command('build_clib')
cmd = self.call_command('install_lib', warn_dir=0)
@ -188,17 +164,17 @@ class bdist_egg(Command):
all_outputs, ext_outputs = self.get_ext_outputs()
self.stubs = []
to_compile = []
for (p,ext_name) in enumerate(ext_outputs):
filename,ext = os.path.splitext(ext_name)
pyfile = os.path.join(self.bdist_dir, strip_module(filename)+'.py')
for (p, ext_name) in enumerate(ext_outputs):
filename, ext = os.path.splitext(ext_name)
pyfile = os.path.join(self.bdist_dir, strip_module(filename) +
'.py')
self.stubs.append(pyfile)
log.info("creating stub loader for %s" % ext_name)
if not self.dry_run:
write_stub(os.path.basename(ext_name), pyfile)
to_compile.append(pyfile)
ext_outputs[p] = ext_name.replace(os.sep,'/')
ext_outputs[p] = ext_name.replace(os.sep, '/')
to_compile.extend(self.make_init_files())
if to_compile:
cmd.byte_compile(to_compile)
if self.distribution.data_files:
@ -206,12 +182,13 @@ class bdist_egg(Command):
# Make the EGG-INFO directory
archive_root = self.bdist_dir
egg_info = os.path.join(archive_root,'EGG-INFO')
egg_info = os.path.join(archive_root, 'EGG-INFO')
self.mkpath(egg_info)
if self.distribution.scripts:
script_dir = os.path.join(egg_info, 'scripts')
log.info("installing scripts to %s" % script_dir)
self.call_command('install_scripts',install_dir=script_dir,no_ep=1)
self.call_command('install_scripts', install_dir=script_dir,
no_ep=1)
self.copy_metadata_to(egg_info)
native_libs = os.path.join(egg_info, "native_libs.txt")
@ -229,10 +206,10 @@ class bdist_egg(Command):
os.unlink(native_libs)
write_safety_flag(
os.path.join(archive_root,'EGG-INFO'), self.zip_safe()
os.path.join(archive_root, 'EGG-INFO'), self.zip_safe()
)
if os.path.exists(os.path.join(self.egg_info,'depends.txt')):
if os.path.exists(os.path.join(self.egg_info, 'depends.txt')):
log.warn(
"WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
"Use the install_requires/extras_require setup() args instead."
@ -243,61 +220,33 @@ class bdist_egg(Command):
# Make the archive
make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
dry_run=self.dry_run, mode=self.gen_header())
dry_run=self.dry_run, mode=self.gen_header())
if not self.keep_temp:
remove_tree(self.bdist_dir, dry_run=self.dry_run)
# Add to 'Distribution.dist_files' so that the "upload" command works
getattr(self.distribution,'dist_files',[]).append(
('bdist_egg',get_python_version(),self.egg_output))
getattr(self.distribution, 'dist_files', []).append(
('bdist_egg', get_python_version(), self.egg_output))
def zap_pyfiles(self):
log.info("Removing .py files from temporary directory")
for base,dirs,files in walk_egg(self.bdist_dir):
for base, dirs, files in walk_egg(self.bdist_dir):
for name in files:
if name.endswith('.py'):
path = os.path.join(base,name)
path = os.path.join(base, name)
log.debug("Deleting %s", path)
os.unlink(path)
def zip_safe(self):
safe = getattr(self.distribution,'zip_safe',None)
safe = getattr(self.distribution, 'zip_safe', None)
if safe is not None:
return safe
log.warn("zip_safe flag not set; analyzing archive contents...")
return analyze_egg(self.bdist_dir, self.stubs)
def make_init_files(self):
"""Create missing package __init__ files"""
init_files = []
for base,dirs,files in walk_egg(self.bdist_dir):
if base==self.bdist_dir:
# don't put an __init__ in the root
continue
for name in files:
if name.endswith('.py'):
if '__init__.py' not in files:
pkg = base[len(self.bdist_dir)+1:].replace(os.sep,'.')
if self.distribution.has_contents_for(pkg):
log.warn("Creating missing __init__.py for %s",pkg)
filename = os.path.join(base,'__init__.py')
if not self.dry_run:
f = open(filename,'w'); f.write(NS_PKG_STUB)
f.close()
init_files.append(filename)
break
else:
# not a package, don't traverse to subdirectories
dirs[:] = []
return init_files
def gen_header(self):
epm = EntryPoint.parse_map(self.distribution.entry_points or '')
ep = epm.get('setuptools.installation',{}).get('eggsecutable')
ep = epm.get('setuptools.installation', {}).get('eggsecutable')
if ep is None:
return 'w' # not an eggsecutable, do it the usual way.
@ -325,7 +274,6 @@ class bdist_egg(Command):
' echo Please rename it back to %(basename)s and try again.\n'
' exec false\n'
'fi\n'
) % locals()
if not self.dry_run:
@ -335,13 +283,12 @@ class bdist_egg(Command):
f.close()
return 'a'
def copy_metadata_to(self, target_dir):
"Copy metadata (egg info) to the target_dir"
# normalize the path (so that a forward-slash in egg_info will
# match using startswith below)
norm_egg_info = os.path.normpath(self.egg_info)
prefix = os.path.join(norm_egg_info,'')
prefix = os.path.join(norm_egg_info, '')
for path in self.ei_cmd.filelist.files:
if path.startswith(prefix):
target = os.path.join(target_dir, path[len(prefix):])
@ -354,23 +301,24 @@ class bdist_egg(Command):
all_outputs = []
ext_outputs = []
paths = {self.bdist_dir:''}
paths = {self.bdist_dir: ''}
for base, dirs, files in os.walk(self.bdist_dir):
for filename in files:
if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
all_outputs.append(paths[base]+filename)
all_outputs.append(paths[base] + filename)
for filename in dirs:
paths[os.path.join(base,filename)] = paths[base]+filename+'/'
paths[os.path.join(base, filename)] = (paths[base] +
filename + '/')
if self.distribution.has_ext_modules():
build_cmd = self.get_finalized_command('build_ext')
for ext in build_cmd.extensions:
if isinstance(ext,Library):
if isinstance(ext, Library):
continue
fullname = build_cmd.get_ext_fullname(ext.name)
filename = build_cmd.get_ext_filename(fullname)
if not os.path.basename(filename).startswith('dl-'):
if os.path.exists(os.path.join(self.bdist_dir,filename)):
if os.path.exists(os.path.join(self.bdist_dir, filename)):
ext_outputs.append(filename)
return all_outputs, ext_outputs
@ -379,24 +327,24 @@ class bdist_egg(Command):
NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())
def walk_egg(egg_dir):
"""Walk an unpacked egg's contents, skipping the metadata directory"""
walker = os.walk(egg_dir)
base,dirs,files = next(walker)
base, dirs, files = next(walker)
if 'EGG-INFO' in dirs:
dirs.remove('EGG-INFO')
yield base,dirs,files
yield base, dirs, files
for bdf in walker:
yield bdf
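# Illustrative sketch (not part of the original diff): walk_egg() behaves
# like os.walk() but never descends into EGG-INFO; the scratch layout below
# is hypothetical.
import tempfile
_egg_dir = tempfile.mkdtemp()
os.makedirs(os.path.join(_egg_dir, 'EGG-INFO'))
os.makedirs(os.path.join(_egg_dir, 'examplepkg'))
_walked = [os.path.basename(b) for b, _dirs, _files in walk_egg(_egg_dir)]
assert 'EGG-INFO' not in _walked
assert 'examplepkg' in _walked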
def analyze_egg(egg_dir, stubs):
# check for existing flag in EGG-INFO
for flag,fn in safety_flags.items():
if os.path.exists(os.path.join(egg_dir,'EGG-INFO',fn)):
for flag, fn in safety_flags.items():
if os.path.exists(os.path.join(egg_dir, 'EGG-INFO', fn)):
return flag
if not can_scan(): return False
if not can_scan():
return False
safe = True
for base, dirs, files in walk_egg(egg_dir):
for name in files:
@ -407,35 +355,42 @@ def analyze_egg(egg_dir, stubs):
safe = scan_module(egg_dir, base, name, stubs) and safe
return safe
def write_safety_flag(egg_dir, safe):
# Write or remove zip safety flag file(s)
for flag,fn in safety_flags.items():
for flag, fn in safety_flags.items():
fn = os.path.join(egg_dir, fn)
if os.path.exists(fn):
if safe is None or bool(safe) != flag:
os.unlink(fn)
elif safe is not None and bool(safe)==flag:
f=open(fn,'wt'); f.write('\n'); f.close()
elif safe is not None and bool(safe) == flag:
f = open(fn, 'wt')
f.write('\n')
f.close()
safety_flags = {
True: 'zip-safe',
False: 'not-zip-safe',
}
def scan_module(egg_dir, base, name, stubs):
"""Check whether module possibly uses unsafe-for-zipfile stuff"""
filename = os.path.join(base,name)
filename = os.path.join(base, name)
if filename[:-1] in stubs:
return True # Extension module
pkg = base[len(egg_dir)+1:].replace(os.sep,'.')
module = pkg+(pkg and '.' or '')+os.path.splitext(name)[0]
return True # Extension module
pkg = base[len(egg_dir) + 1:].replace(os.sep, '.')
module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0]
if sys.version_info < (3, 3):
skip = 8 # skip magic & date
skip = 8 # skip magic & date
else:
skip = 12 # skip magic & date & file size
f = open(filename,'rb'); f.read(skip)
code = marshal.load(f); f.close()
f = open(filename, 'rb')
f.read(skip)
code = marshal.load(f)
f.close()
safe = True
symbols = dict.fromkeys(iter_symbols(code))
for bad in ['__file__', '__path__']:
@ -452,21 +407,24 @@ def scan_module(egg_dir, base, name, stubs):
log.warn("%s: module MAY be using inspect.%s", module, bad)
safe = False
if '__name__' in symbols and '__main__' in symbols and '.' not in module:
if sys.version[:3]=="2.4": # -m works w/zipfiles in 2.5
if sys.version[:3] == "2.4": # -m works w/zipfiles in 2.5
log.warn("%s: top-level module may be 'python -m' script", module)
safe = False
return safe
def iter_symbols(code):
"""Yield names and strings used by `code` and its nested code objects"""
for name in code.co_names: yield name
for name in code.co_names:
yield name
for const in code.co_consts:
if isinstance(const,basestring):
if isinstance(const, basestring):
yield const
elif isinstance(const,CodeType):
elif isinstance(const, CodeType):
for name in iter_symbols(const):
yield name
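# Illustrative sketch (not part of the original diff): the names that
# scan_module() would see for a tiny compiled snippet.
_example_code = compile("import inspect\nx = __file__", "<example>", "exec")
# co_names of the module code include both the imported module and the
# dunder global that scan_module() warns about.
assert set(['inspect', '__file__']) <= set(iter_symbols(_example_code))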
def can_scan():
if not sys.platform.startswith('java') and sys.platform != 'cli':
# CPython, PyPy, etc.
@ -475,39 +433,6 @@ def can_scan():
log.warn("Please ask the author to include a 'zip_safe'"
" setting (either True or False) in the package's setup.py")
# Attribute names of options for commands that might need to be convinced to
# install to the egg build directory
@ -515,9 +440,9 @@ INSTALL_DIRECTORY_ATTRS = [
'install_lib', 'install_dir', 'install_data', 'install_base'
]
def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
mode='w'
):
mode='w'):
"""Create a zip file from all the files under 'base_dir'. The output
zip file will be named 'base_dir' + ".zip". Uses either the "zipfile"
Python module (if available) or the InfoZIP "zip" utility (if installed
@ -525,6 +450,7 @@ def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
raises DistutilsExecError. Returns the name of the output zip file.
"""
import zipfile
mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
@ -532,13 +458,14 @@ def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
for name in names:
path = os.path.normpath(os.path.join(dirname, name))
if os.path.isfile(path):
p = path[len(base_dir)+1:]
p = path[len(base_dir) + 1:]
if not dry_run:
z.write(path, p)
log.debug("adding '%s'" % p)
if compress is None:
compress = (sys.version>="2.4") # avoid 2.3 zipimport bug when 64 bits
# avoid 2.3 zipimport bug when 64 bits
compress = (sys.version >= "2.4")
compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)]
if not dry_run:
@ -550,4 +477,3 @@ def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
for dirname, dirs, files in os.walk(base_dir):
visit(None, dirname, files)
return zip_filename
#
View File
@ -1,51 +1,30 @@
# This is just a kludge so that bdist_rpm doesn't guess wrong about the
# distribution name and version, if the egg_info command is going to alter
# them, another kludge to allow you to build old-style non-egg RPMs, and
# finally, a kludge to track .rpm files for uploading when run on Python <2.5.
import distutils.command.bdist_rpm as orig
from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm
import sys, os
class bdist_rpm(_bdist_rpm):
class bdist_rpm(orig.bdist_rpm):
"""
Override the default bdist_rpm behavior to do the following:
def initialize_options(self):
_bdist_rpm.initialize_options(self)
self.no_egg = None
if sys.version<"2.5":
# Track for uploading any .rpm file(s) moved to self.dist_dir
def move_file(self, src, dst, level=1):
_bdist_rpm.move_file(self, src, dst, level)
if dst==self.dist_dir and src.endswith('.rpm'):
getattr(self.distribution,'dist_files',[]).append(
('bdist_rpm',
src.endswith('.src.rpm') and 'any' or sys.version[:3],
os.path.join(dst, os.path.basename(src)))
)
1. Run egg_info to ensure the name and version are properly calculated.
2. Always run 'install' using --single-version-externally-managed to
disable eggs in RPM distributions.
3. Replace dash with underscore in the version numbers for better RPM
compatibility.
"""
def run(self):
self.run_command('egg_info') # ensure distro name is up-to-date
_bdist_rpm.run(self)
# ensure distro name is up-to-date
self.run_command('egg_info')
orig.bdist_rpm.run(self)
def _make_spec_file(self):
version = self.distribution.get_version()
rpmversion = version.replace('-','_')
spec = _bdist_rpm._make_spec_file(self)
line23 = '%define version '+version
line24 = '%define version '+rpmversion
spec = [
rpmversion = version.replace('-', '_')
spec = orig.bdist_rpm._make_spec_file(self)
line23 = '%define version ' + version
line24 = '%define version ' + rpmversion
spec = [
line.replace(
"Source0: %{name}-%{version}.tar",
"Source0: %{name}-%{unmangled_version}.tar"
@ -55,28 +34,10 @@ class bdist_rpm(_bdist_rpm):
).replace(
"%setup",
"%setup -n %{name}-%{unmangled_version}"
).replace(line23,line24)
).replace(line23, line24)
for line in spec
]
spec.insert(spec.index(line24)+1, "%define unmangled_version "+version)
insert_loc = spec.index(line24) + 1
unmangled_version = "%define unmangled_version " + version
spec.insert(insert_loc, unmangled_version)
return spec
View File
@ -1,82 +1,21 @@
from distutils.command.bdist_wininst import bdist_wininst as _bdist_wininst
import os, sys
import distutils.command.bdist_wininst as orig
class bdist_wininst(_bdist_wininst):
_good_upload = _bad_upload = None
def create_exe(self, arcname, fullname, bitmap=None):
_bdist_wininst.create_exe(self, arcname, fullname, bitmap)
installer_name = self.get_installer_filename(fullname)
if self.target_version:
pyversion = self.target_version
# fix 2.5+ bdist_wininst ignoring --target-version spec
self._bad_upload = ('bdist_wininst', 'any', installer_name)
else:
pyversion = 'any'
self._good_upload = ('bdist_wininst', pyversion, installer_name)
def _fix_upload_names(self):
good, bad = self._good_upload, self._bad_upload
dist_files = getattr(self.distribution, 'dist_files', [])
if bad in dist_files:
dist_files.remove(bad)
if good not in dist_files:
dist_files.append(good)
def reinitialize_command (self, command, reinit_subcommands=0):
class bdist_wininst(orig.bdist_wininst):
def reinitialize_command(self, command, reinit_subcommands=0):
"""
Supplement reinitialize_command to work around
http://bugs.python.org/issue20819
"""
cmd = self.distribution.reinitialize_command(
command, reinit_subcommands)
if command in ('install', 'install_lib'):
cmd.install_lib = None # work around distutils bug
cmd.install_lib = None
return cmd
def run(self):
self._is_running = True
try:
_bdist_wininst.run(self)
self._fix_upload_names()
orig.bdist_wininst.run(self)
finally:
self._is_running = False
if not hasattr(_bdist_wininst, 'get_installer_filename'):
def get_installer_filename(self, fullname):
# Factored out to allow overriding in subclasses
if self.target_version:
# if we create an installer for a specific python version,
# it's better to include this in the name
installer_name = os.path.join(self.dist_dir,
"%s.win32-py%s.exe" %
(fullname, self.target_version))
else:
installer_name = os.path.join(self.dist_dir,
"%s.win32.exe" % fullname)
return installer_name
View File

@ -1,25 +1,30 @@
from distutils.command.build_ext import build_ext as _du_build_ext
from distutils.file_util import copy_file
from distutils.ccompiler import new_compiler
from distutils.sysconfig import customize_compiler
from distutils.errors import DistutilsError
from distutils import log
import os
import sys
import itertools
from setuptools.extension import Library
try:
# Attempt to use Pyrex for building extensions, if available
from Pyrex.Distutils.build_ext import build_ext as _build_ext
except ImportError:
_build_ext = _du_build_ext
import os, sys
from distutils.file_util import copy_file
from setuptools.extension import Library
from distutils.ccompiler import new_compiler
from distutils.sysconfig import customize_compiler
try:
# Python 2.7 or >=3.2
from sysconfig import _CONFIG_VARS
except ImportError:
from distutils.sysconfig import get_config_var
get_config_var("LDSHARED") # make sure _config_vars is initialized
del get_config_var
from distutils.sysconfig import _config_vars as _CONFIG_VARS
from distutils import log
from distutils.errors import *
have_rtld = False
use_stubs = False
@ -29,20 +34,13 @@ if sys.platform == "darwin":
use_stubs = True
elif os.name != 'nt':
try:
from dl import RTLD_NOW
have_rtld = True
use_stubs = True
import dl
use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW')
except ImportError:
pass
def if_dl(s):
if have_rtld:
return s
return ''
if_dl = lambda s: s if have_rtld else ''
class build_ext(_build_ext):
@ -62,8 +60,9 @@ class build_ext(_build_ext):
modpath = fullname.split('.')
package = '.'.join(modpath[:-1])
package_dir = build_py.get_package_dir(package)
dest_filename = os.path.join(package_dir,os.path.basename(filename))
src_filename = os.path.join(self.build_lib,filename)
dest_filename = os.path.join(package_dir,
os.path.basename(filename))
src_filename = os.path.join(self.build_lib, filename)
# Always copy, even if source is older than destination, to ensure
# that the right extensions for the current Python/platform are
@ -75,8 +74,8 @@ class build_ext(_build_ext):
if ext._needs_stub:
self.write_stub(package_dir or os.curdir, ext, True)
if _build_ext is not _du_build_ext and not hasattr(_build_ext,'pyrex_sources'):
if _build_ext is not _du_build_ext and not hasattr(_build_ext,
'pyrex_sources'):
# Workaround for problems using some Pyrex versions w/SWIG and/or 2.4
def swig_sources(self, sources, *otherargs):
# first do any Pyrex processing
@ -84,18 +83,16 @@ class build_ext(_build_ext):
# Then do any actual SWIG stuff on the remainder
return _du_build_ext.swig_sources(self, sources, *otherargs)
def get_ext_filename(self, fullname):
filename = _build_ext.get_ext_filename(self,fullname)
filename = _build_ext.get_ext_filename(self, fullname)
if fullname in self.ext_map:
ext = self.ext_map[fullname]
if isinstance(ext,Library):
if isinstance(ext, Library):
fn, ext = os.path.splitext(filename)
return self.shlib_compiler.library_filename(fn,libtype)
return self.shlib_compiler.library_filename(fn, libtype)
elif use_stubs and ext._links_to_dynamic:
d,fn = os.path.split(filename)
return os.path.join(d,'dl-'+fn)
d, fn = os.path.split(filename)
return os.path.join(d, 'dl-' + fn)
return filename
def initialize_options(self):
@ -109,7 +106,7 @@ class build_ext(_build_ext):
self.extensions = self.extensions or []
self.check_extensions_list(self.extensions)
self.shlibs = [ext for ext in self.extensions
if isinstance(ext,Library)]
if isinstance(ext, Library)]
if self.shlibs:
self.setup_shlib_compiler()
for ext in self.extensions:
@ -122,11 +119,12 @@ class build_ext(_build_ext):
# XXX what to do with conflicts?
self.ext_map[fullname.split('.')[-1]] = ext
ltd = ext._links_to_dynamic = \
self.shlibs and self.links_to_dynamic(ext) or False
ext._needs_stub = ltd and use_stubs and not isinstance(ext,Library)
ltd = self.shlibs and self.links_to_dynamic(ext) or False
ns = ltd and use_stubs and not isinstance(ext, Library)
ext._links_to_dynamic = ltd
ext._needs_stub = ns
filename = ext._file_name = self.get_ext_filename(fullname)
libdir = os.path.dirname(os.path.join(self.build_lib,filename))
libdir = os.path.dirname(os.path.join(self.build_lib, filename))
if ltd and libdir not in ext.library_dirs:
ext.library_dirs.append(libdir)
if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
@ -140,7 +138,8 @@ class build_ext(_build_ext):
tmp = _CONFIG_VARS.copy()
try:
# XXX Help! I don't have any idea whether these are right...
_CONFIG_VARS['LDSHARED'] = "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup"
_CONFIG_VARS['LDSHARED'] = (
"gcc -Wl,-x -dynamiclib -undefined dynamic_lookup")
_CONFIG_VARS['CCSHARED'] = " -dynamiclib"
_CONFIG_VARS['SO'] = ".dylib"
customize_compiler(compiler)
@ -154,7 +153,7 @@ class build_ext(_build_ext):
compiler.set_include_dirs(self.include_dirs)
if self.define is not None:
# 'define' option is a list of (name,value) tuples
for (name,value) in self.define:
for (name, value) in self.define:
compiler.define_macro(name, value)
if self.undef is not None:
for macro in self.undef:
@ -171,23 +170,20 @@ class build_ext(_build_ext):
# hack so distutils' build_extension() builds a library instead
compiler.link_shared_object = link_shared_object.__get__(compiler)
def get_export_symbols(self, ext):
if isinstance(ext,Library):
if isinstance(ext, Library):
return ext.export_symbols
return _build_ext.get_export_symbols(self,ext)
return _build_ext.get_export_symbols(self, ext)
def build_extension(self, ext):
_compiler = self.compiler
try:
if isinstance(ext,Library):
if isinstance(ext, Library):
self.compiler = self.shlib_compiler
_build_ext.build_extension(self,ext)
_build_ext.build_extension(self, ext)
if ext._needs_stub:
self.write_stub(
self.get_finalized_command('build_py').build_lib, ext
)
cmd = self.get_finalized_command('build_py').build_lib
self.write_stub(cmd, ext)
finally:
self.compiler = _compiler
@ -197,54 +193,66 @@ class build_ext(_build_ext):
# XXX as dynamic, and not just using a locally-found version or a
# XXX static-compiled version
libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
pkg = '.'.join(ext._full_name.split('.')[:-1]+[''])
for libname in ext.libraries:
if pkg+libname in libnames: return True
return False
pkg = '.'.join(ext._full_name.split('.')[:-1] + [''])
return any(pkg + libname in libnames for libname in ext.libraries)
def get_outputs(self):
outputs = _build_ext.get_outputs(self)
optimize = self.get_finalized_command('build_py').optimize
for ext in self.extensions:
if ext._needs_stub:
base = os.path.join(self.build_lib, *ext._full_name.split('.'))
outputs.append(base+'.py')
outputs.append(base+'.pyc')
if optimize:
outputs.append(base+'.pyo')
return outputs
return _build_ext.get_outputs(self) + self.__get_stubs_outputs()
def __get_stubs_outputs(self):
# assemble the base name for each extension that needs a stub
ns_ext_bases = (
os.path.join(self.build_lib, *ext._full_name.split('.'))
for ext in self.extensions
if ext._needs_stub
)
# pair each base with the extension
pairs = itertools.product(ns_ext_bases, self.__get_output_extensions())
return list(base + fnext for base, fnext in pairs)
def __get_output_extensions(self):
yield '.py'
yield '.pyc'
if self.get_finalized_command('build_py').optimize:
yield '.pyo'
def write_stub(self, output_dir, ext, compile=False):
log.info("writing stub loader for %s to %s",ext._full_name, output_dir)
stub_file = os.path.join(output_dir, *ext._full_name.split('.'))+'.py'
log.info("writing stub loader for %s to %s", ext._full_name,
output_dir)
stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) +
'.py')
if compile and os.path.exists(stub_file):
raise DistutilsError(stub_file+" already exists! Please delete.")
raise DistutilsError(stub_file + " already exists! Please delete.")
if not self.dry_run:
f = open(stub_file,'w')
f.write('\n'.join([
"def __bootstrap__():",
" global __bootstrap__, __file__, __loader__",
" import sys, os, pkg_resources, imp"+if_dl(", dl"),
" __file__ = pkg_resources.resource_filename(__name__,%r)"
% os.path.basename(ext._file_name),
" del __bootstrap__",
" if '__loader__' in globals():",
" del __loader__",
if_dl(" old_flags = sys.getdlopenflags()"),
" old_dir = os.getcwd()",
" try:",
" os.chdir(os.path.dirname(__file__))",
if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"),
" imp.load_dynamic(__name__,__file__)",
" finally:",
if_dl(" sys.setdlopenflags(old_flags)"),
" os.chdir(old_dir)",
"__bootstrap__()",
"" # terminal \n
]))
f = open(stub_file, 'w')
f.write(
'\n'.join([
"def __bootstrap__():",
" global __bootstrap__, __file__, __loader__",
" import sys, os, pkg_resources, imp" + if_dl(", dl"),
" __file__ = pkg_resources.resource_filename"
"(__name__,%r)"
% os.path.basename(ext._file_name),
" del __bootstrap__",
" if '__loader__' in globals():",
" del __loader__",
if_dl(" old_flags = sys.getdlopenflags()"),
" old_dir = os.getcwd()",
" try:",
" os.chdir(os.path.dirname(__file__))",
if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"),
" imp.load_dynamic(__name__,__file__)",
" finally:",
if_dl(" sys.setdlopenflags(old_flags)"),
" os.chdir(old_dir)",
"__bootstrap__()",
"" # terminal \n
])
)
f.close()
if compile:
from distutils.util import byte_compile
byte_compile([stub_file], optimize=0,
force=True, dry_run=self.dry_run)
optimize = self.get_finalized_command('install_lib').optimize
@ -255,14 +263,15 @@ class build_ext(_build_ext):
os.unlink(stub_file)
if use_stubs or os.name=='nt':
if use_stubs or os.name == 'nt':
# Build shared libraries
#
def link_shared_object(self, objects, output_libname, output_dir=None,
libraries=None, library_dirs=None, runtime_library_dirs=None,
export_symbols=None, debug=0, extra_preargs=None,
extra_postargs=None, build_temp=None, target_lang=None
): self.link(
def link_shared_object(
self, objects, output_libname, output_dir=None, libraries=None,
library_dirs=None, runtime_library_dirs=None, export_symbols=None,
debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
target_lang=None):
self.link(
self.SHARED_LIBRARY, objects, output_libname,
output_dir, libraries, library_dirs, runtime_library_dirs,
export_symbols, debug, extra_preargs, extra_postargs,
@ -272,19 +281,19 @@ else:
# Build static libraries everywhere else
libtype = 'static'
def link_shared_object(self, objects, output_libname, output_dir=None,
libraries=None, library_dirs=None, runtime_library_dirs=None,
export_symbols=None, debug=0, extra_preargs=None,
extra_postargs=None, build_temp=None, target_lang=None
):
def link_shared_object(
self, objects, output_libname, output_dir=None, libraries=None,
library_dirs=None, runtime_library_dirs=None, export_symbols=None,
debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
target_lang=None):
# XXX we need to either disallow these attrs on Library instances,
# or warn/abort here if set, or something...
#libraries=None, library_dirs=None, runtime_library_dirs=None,
#export_symbols=None, extra_preargs=None, extra_postargs=None,
#build_temp=None
# or warn/abort here if set, or something...
# libraries=None, library_dirs=None, runtime_library_dirs=None,
# export_symbols=None, extra_preargs=None, extra_postargs=None,
# build_temp=None
assert output_dir is None # distutils build_ext doesn't pass this
output_dir,filename = os.path.split(output_libname)
assert output_dir is None # distutils build_ext doesn't pass this
output_dir, filename = os.path.split(output_libname)
basename, ext = os.path.splitext(filename)
if self.library_filename("x").startswith('lib'):
# strip 'lib' prefix; this is kludgy if some platform uses
@ -294,5 +303,3 @@ else:
self.create_static_lib(
objects, basename, output_dir, debug, target_lang
)

View File

@ -1,10 +1,10 @@
from glob import glob
from distutils.util import convert_path
import distutils.command.build_py as orig
import os
import sys
import fnmatch
import textwrap
from distutils.command.build_py import build_py as _build_py
from distutils.util import convert_path
from glob import glob
try:
from setuptools.lib2to3_ex import Mixin2to3
@ -13,7 +13,8 @@ except ImportError:
def run_2to3(self, files, doctests=True):
"do nothing"
class build_py(_build_py, Mixin2to3):
class build_py(orig.build_py, Mixin2to3):
"""Enhanced 'build_py' command that includes data files with packages
The data files are specified via a 'package_data' argument to 'setup()'.
@ -22,11 +23,14 @@ class build_py(_build_py, Mixin2to3):
Also, this version of the 'build_py' command allows you to specify both
'py_modules' and 'packages' in the same setup operation.
"""
def finalize_options(self):
_build_py.finalize_options(self)
orig.build_py.finalize_options(self)
self.package_data = self.distribution.package_data
self.exclude_package_data = self.distribution.exclude_package_data or {}
if 'data_files' in self.__dict__: del self.__dict__['data_files']
self.exclude_package_data = (self.distribution.exclude_package_data or
{})
if 'data_files' in self.__dict__:
del self.__dict__['data_files']
self.__updated_files = []
self.__doctests_2to3 = []
@ -48,16 +52,17 @@ class build_py(_build_py, Mixin2to3):
# Only compile actual .py files, using our base class' idea of what our
# output files are.
self.byte_compile(_build_py.get_outputs(self, include_bytecode=0))
self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0))
def __getattr__(self, attr):
if attr=='data_files': # lazily compute data files
if attr == 'data_files': # lazily compute data files
self.data_files = files = self._get_data_files()
return files
return _build_py.__getattr__(self,attr)
return orig.build_py.__getattr__(self, attr)
def build_module(self, module, module_file, package):
outfile, copied = _build_py.build_module(self, module, module_file, package)
outfile, copied = orig.build_py.build_module(self, module, module_file,
package)
if copied:
self.__updated_files.append(outfile)
return outfile, copied
@ -74,12 +79,12 @@ class build_py(_build_py, Mixin2to3):
build_dir = os.path.join(*([self.build_lib] + package.split('.')))
# Length of path to strip from found files
plen = len(src_dir)+1
plen = len(src_dir) + 1
# Strip directory from globbed filenames
filenames = [
file[plen:] for file in self.find_data_files(package, src_dir)
]
]
data.append((package, src_dir, build_dir, filenames))
return data
@ -102,7 +107,8 @@ class build_py(_build_py, Mixin2to3):
srcfile = os.path.join(src_dir, filename)
outf, copied = self.copy_file(srcfile, target)
srcfile = os.path.abspath(srcfile)
if copied and srcfile in self.distribution.convert_2to3_doctests:
if (copied and
srcfile in self.distribution.convert_2to3_doctests):
self.__doctests_2to3.append(outf)
def analyze_manifest(self):
@ -117,21 +123,22 @@ class build_py(_build_py, Mixin2to3):
self.run_command('egg_info')
ei_cmd = self.get_finalized_command('egg_info')
for path in ei_cmd.filelist.files:
d,f = os.path.split(assert_relative(path))
d, f = os.path.split(assert_relative(path))
prev = None
oldf = f
while d and d!=prev and d not in src_dirs:
while d and d != prev and d not in src_dirs:
prev = d
d, df = os.path.split(d)
f = os.path.join(df, f)
if d in src_dirs:
if path.endswith('.py') and f==oldf:
continue # it's a module, not data
mf.setdefault(src_dirs[d],[]).append(path)
if path.endswith('.py') and f == oldf:
continue # it's a module, not data
mf.setdefault(src_dirs[d], []).append(path)
def get_data_files(self): pass # kludge 2.4 for lazy computation
def get_data_files(self):
pass # kludge 2.4 for lazy computation
if sys.version<"2.4": # Python 2.4 already has this code
if sys.version < "2.4": # Python 2.4 already has this code
def get_outputs(self, include_bytecode=1):
"""Return complete list of files copied to the build directory
@ -140,11 +147,11 @@ class build_py(_build_py, Mixin2to3):
needed for the 'install_lib' command to do its job properly, and to
generate a correct installation manifest.)
"""
return _build_py.get_outputs(self, include_bytecode) + [
return orig.build_py.get_outputs(self, include_bytecode) + [
os.path.join(build_dir, filename)
for package, src_dir, build_dir,filenames in self.data_files
for package, src_dir, build_dir, filenames in self.data_files
for filename in filenames
]
]
def check_package(self, package, package_dir):
"""Check namespace packages' __init__ for declare_namespace"""
@ -153,36 +160,37 @@ class build_py(_build_py, Mixin2to3):
except KeyError:
pass
init_py = _build_py.check_package(self, package, package_dir)
init_py = orig.build_py.check_package(self, package, package_dir)
self.packages_checked[package] = init_py
if not init_py or not self.distribution.namespace_packages:
return init_py
for pkg in self.distribution.namespace_packages:
if pkg==package or pkg.startswith(package+'.'):
if pkg == package or pkg.startswith(package + '.'):
break
else:
return init_py
f = open(init_py,'rbU')
f = open(init_py, 'rbU')
if 'declare_namespace'.encode() not in f.read():
from distutils import log
log.warn(
"WARNING: %s is a namespace package, but its __init__.py does\n"
"not declare_namespace(); setuptools 0.7 will REQUIRE this!\n"
'(See the setuptools manual under "Namespace Packages" for '
"details.)\n", package
from distutils.errors import DistutilsError
raise DistutilsError(
"Namespace package problem: %s is a namespace package, but "
"its\n__init__.py does not call declare_namespace()! Please "
'fix it.\n(See the setuptools manual under '
'"Namespace Packages" for details.)\n"' % (package,)
)
f.close()
return init_py
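# A namespace package passing this check declares itself in its
# __init__.py, e.g. for a hypothetical project with
# namespace_packages=['example_ns']:
#
#   __import__('pkg_resources').declare_namespace(__name__)
#
# Without that call, the check above now raises DistutilsError instead of
# only logging a warning as the previous release did.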
def initialize_options(self):
self.packages_checked={}
_build_py.initialize_options(self)
self.packages_checked = {}
orig.build_py.initialize_options(self)
def get_package_dir(self, package):
res = _build_py.get_package_dir(self, package)
res = orig.build_py.get_package_dir(self, package)
if self.distribution.src_root is not None:
return os.path.join(self.distribution.src_root, res)
return res
@ -202,7 +210,7 @@ class build_py(_build_py, Mixin2to3):
seen = {}
return [
f for f in files if f not in bad
and f not in seen and seen.setdefault(f,1) # ditch dupes
and f not in seen and seen.setdefault(f, 1) # ditch dupes
]
@ -210,6 +218,7 @@ def assert_relative(path):
if not os.path.isabs(path):
return path
from distutils.errors import DistutilsSetupError
msg = textwrap.dedent("""
Error: setup script specifies an absolute path:

View File

@ -1,9 +1,14 @@
from setuptools.command.easy_install import easy_install
from distutils.util import convert_path, subst_vars
from pkg_resources import Distribution, PathMetadata, normalize_path
from distutils.util import convert_path
from distutils import log
from distutils.errors import DistutilsError, DistutilsOptionError
import os, sys, setuptools, glob
import os
import glob
from pkg_resources import Distribution, PathMetadata, normalize_path
from setuptools.command.easy_install import easy_install
from setuptools.compat import PY3
import setuptools
class develop(easy_install):
"""Set up package for development"""
@ -32,59 +37,56 @@ class develop(easy_install):
self.egg_path = None
easy_install.initialize_options(self)
self.setup_path = None
self.always_copy_from = '.' # always copy eggs installed in curdir
self.always_copy_from = '.' # always copy eggs installed in curdir
def finalize_options(self):
ei = self.get_finalized_command("egg_info")
if ei.broken_egg_info:
raise DistutilsError(
"Please rename %r to %r before using 'develop'"
% (ei.egg_info, ei.broken_egg_info)
)
template = "Please rename %r to %r before using 'develop'"
args = ei.egg_info, ei.broken_egg_info
raise DistutilsError(template % args)
self.args = [ei.egg_name]
easy_install.finalize_options(self)
self.expand_basedirs()
self.expand_dirs()
# pick up setup-dir .egg files only: no .egg-info
self.package_index.scan(glob.glob('*.egg'))
self.egg_link = os.path.join(self.install_dir, ei.egg_name+'.egg-link')
self.egg_link = os.path.join(self.install_dir, ei.egg_name +
'.egg-link')
self.egg_base = ei.egg_base
if self.egg_path is None:
self.egg_path = os.path.abspath(ei.egg_base)
target = normalize_path(self.egg_base)
if normalize_path(os.path.join(self.install_dir, self.egg_path)) != target:
egg_path = normalize_path(os.path.join(self.install_dir,
self.egg_path))
if egg_path != target:
raise DistutilsOptionError(
"--egg-path must be a relative path from the install"
" directory to "+target
)
" directory to " + target
)
# Make a distribution for the package's source
self.dist = Distribution(
target,
PathMetadata(target, os.path.abspath(ei.egg_info)),
project_name = ei.egg_name
project_name=ei.egg_name
)
p = self.egg_base.replace(os.sep,'/')
if p!= os.curdir:
p = '../' * (p.count('/')+1)
p = self.egg_base.replace(os.sep, '/')
if p != os.curdir:
p = '../' * (p.count('/') + 1)
self.setup_path = p
p = normalize_path(os.path.join(self.install_dir, self.egg_path, p))
if p != normalize_path(os.curdir):
if p != normalize_path(os.curdir):
raise DistutilsOptionError(
"Can't get a consistent path to setup script from"
" installation directory", p, normalize_path(os.curdir))
def install_for_development(self):
if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False):
if PY3 and getattr(self.distribution, 'use_2to3', False):
# If we run 2to3 we can not do this inplace:
# Ensure metadata is up-to-date
@ -99,12 +101,13 @@ class develop(easy_install):
self.reinitialize_command('build_ext', inplace=0)
self.run_command('build_ext')
# Fixup egg-link and easy-install.pth
ei_cmd = self.get_finalized_command("egg_info")
self.egg_path = build_path
self.dist.location = build_path
self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info) # XXX
# XXX
self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info)
else:
# Without 2to3 inplace works fine:
self.run_command('egg_info')
@ -112,7 +115,7 @@ class develop(easy_install):
# Build extensions in-place
self.reinitialize_command('build_ext', inplace=1)
self.run_command('build_ext')
self.install_site_py() # ensure that target dir is site-safe
if setuptools.bootstrap_install_from:
self.easy_install(setuptools.bootstrap_install_from)
@ -121,21 +124,21 @@ class develop(easy_install):
# create an .egg-link in the installation dir, pointing to our egg
log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
if not self.dry_run:
f = open(self.egg_link,"w")
f = open(self.egg_link, "w")
f.write(self.egg_path + "\n" + self.setup_path)
f.close()
# postprocess the installed distro, fixing up .pth, installing scripts,
# and handling requirements
self.process_distribution(None, self.dist, not self.no_deps)
def uninstall_link(self):
if os.path.exists(self.egg_link):
log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
egg_link_file = open(self.egg_link)
contents = [line.rstrip() for line in egg_link_file]
egg_link_file.close()
if contents not in ([self.egg_path], [self.egg_path, self.setup_path]):
if contents not in ([self.egg_path],
[self.egg_path, self.setup_path]):
log.warn("Link points to %s: uninstall aborted", contents)
return
if not self.dry_run:
@ -149,7 +152,7 @@ class develop(easy_install):
def install_egg_scripts(self, dist):
if dist is not self.dist:
# Installing a dependency, so fall back to normal behavior
return easy_install.install_egg_scripts(self,dist)
return easy_install.install_egg_scripts(self, dist)
# create wrapper scripts in the script dir, pointing to dist.scripts
@ -160,8 +163,7 @@ class develop(easy_install):
for script_name in self.distribution.scripts or []:
script_path = os.path.abspath(convert_path(script_name))
script_name = os.path.basename(script_path)
f = open(script_path,'rU')
f = open(script_path, 'rU')
script_text = f.read()
f.close()
self.install_script(dist, script_name, script_text, script_path)

File diff suppressed because it is too large

View File

@ -2,22 +2,30 @@
Create a distribution's .egg-info directory and contents"""
from distutils.filelist import FileList as _FileList
from distutils.util import convert_path
from distutils import log
import distutils.errors
import distutils.filelist
import os
import re
import sys
from setuptools import Command
import distutils.errors
from distutils import log
from setuptools.command.sdist import sdist
from setuptools.compat import basestring
from setuptools import svn_utils
from distutils.util import convert_path
from distutils.filelist import FileList as _FileList
from pkg_resources import (parse_requirements, safe_name, parse_version,
safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename)
from setuptools.command.sdist import walk_revctrl
try:
from setuptools_svn import svn_utils
except ImportError:
pass
from setuptools import Command
from setuptools.command.sdist import sdist
from setuptools.compat import basestring, PY3, StringIO
from setuptools.command.sdist import walk_revctrl
from pkg_resources import (
parse_requirements, safe_name, parse_version,
safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename)
import setuptools.unicode_utils as unicode_utils
from pkg_resources import packaging
class egg_info(Command):
description = "create a distribution's .egg-info directory"
@ -26,11 +34,11 @@ class egg_info(Command):
('egg-base=', 'e', "directory containing .egg-info directories"
" (default: top of the source tree)"),
('tag-svn-revision', 'r',
"Add subversion revision ID to version number"),
"Add subversion revision ID to version number"),
('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
('tag-build=', 'b', "Specify explicit tag to add to version number"),
('no-svn-revision', 'R',
"Don't add subversion revision ID [default]"),
"Don't add subversion revision ID [default]"),
('no-date', 'D', "Don't include date stamp [default]"),
]
@ -51,6 +59,7 @@ class egg_info(Command):
def save_version_info(self, filename):
from setuptools.command.setopt import edit_config
values = dict(
egg_info=dict(
tag_svn_revision=0,
@ -65,25 +74,32 @@ class egg_info(Command):
self.vtags = self.tags()
self.egg_version = self.tagged_version()
parsed_version = parse_version(self.egg_version)
try:
is_version = isinstance(parsed_version, packaging.version.Version)
spec = (
"%s==%s" if is_version else "%s===%s"
)
list(
parse_requirements('%s==%s' % (self.egg_name,self.egg_version))
parse_requirements(spec % (self.egg_name, self.egg_version))
)
except ValueError:
raise distutils.errors.DistutilsOptionError(
"Invalid distribution name or version syntax: %s-%s" %
(self.egg_name,self.egg_version)
(self.egg_name, self.egg_version)
)
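# Illustration with hypothetical values: a PEP 440 version such as
# '12.0.5' parses to packaging.version.Version, so the sanity check uses
# 'name==12.0.5'; a legacy version such as '1.0-custom' does not, and the
# arbitrary-equality form 'name===1.0-custom' is used instead.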
if self.egg_base is None:
dirs = self.distribution.package_dir
self.egg_base = (dirs or {}).get('',os.curdir)
self.egg_base = (dirs or {}).get('', os.curdir)
self.ensure_dirname('egg_base')
self.egg_info = to_filename(self.egg_name)+'.egg-info'
self.egg_info = to_filename(self.egg_name) + '.egg-info'
if self.egg_base != os.curdir:
self.egg_info = os.path.join(self.egg_base, self.egg_info)
if '-' in self.egg_name: self.check_broken_egg_info()
if '-' in self.egg_name:
self.check_broken_egg_info()
# Set package version for the benefit of dumber commands
# (e.g. sdist, bdist_wininst, etc.)
@ -95,7 +111,7 @@ class egg_info(Command):
# to the version info
#
pd = self.distribution._patched_dist
if pd is not None and pd.key==self.egg_name.lower():
if pd is not None and pd.key == self.egg_name.lower():
pd._version = self.egg_version
pd._parsed_version = parse_version(self.egg_version)
self.distribution._patched_dist = None
@ -127,7 +143,7 @@ class egg_info(Command):
to the file.
"""
log.info("writing %s to %s", what, filename)
if sys.version_info >= (3,):
if PY3:
data = data.encode("utf-8")
if not self.dry_run:
f = open(filename, 'wb')
@ -153,7 +169,7 @@ class egg_info(Command):
installer = self.distribution.fetch_build_egg
for ep in iter_entry_points('egg_info.writers'):
writer = ep.load(installer=installer)
writer(self, ep.name, os.path.join(self.egg_info,ep.name))
writer(self, ep.name, os.path.join(self.egg_info, ep.name))
# Get rid of native_libs.txt if it was put there by older bdist_egg
nl = os.path.join(self.egg_info, "native_libs.txt")
@ -165,68 +181,96 @@ class egg_info(Command):
def tags(self):
version = ''
if self.tag_build:
version+=self.tag_build
if self.tag_svn_revision and (
os.path.exists('.svn') or os.path.exists('PKG-INFO')
): version += '-r%s' % self.get_svn_revision()
version += self.tag_build
if self.tag_svn_revision:
rev = self.get_svn_revision()
if rev: # is 0 if it's not an svn working copy
version += '-r%s' % rev
if self.tag_date:
import time
version += time.strftime("-%Y%m%d")
return version
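# Example with hypothetical values: tag_build='.dev', an svn revision of
# 1234 and tag_date enabled on 2015-01-29 produce the suffix
# '.dev-r1234-20150129', which tagged_version() appends to the project
# version.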
@staticmethod
def get_svn_revision():
if 'svn_utils' not in globals():
return "0"
return str(svn_utils.SvnInfo.load(os.curdir).get_revision())
def find_sources(self):
"""Generate SOURCES.txt manifest file"""
manifest_filename = os.path.join(self.egg_info,"SOURCES.txt")
manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
mm = manifest_maker(self.distribution)
mm.manifest = manifest_filename
mm.run()
self.filelist = mm.filelist
def check_broken_egg_info(self):
bei = self.egg_name+'.egg-info'
bei = self.egg_name + '.egg-info'
if self.egg_base != os.curdir:
bei = os.path.join(self.egg_base, bei)
if os.path.exists(bei):
log.warn(
"-"*78+'\n'
"-" * 78 + '\n'
"Note: Your current .egg-info directory has a '-' in its name;"
'\nthis will not work correctly with "setup.py develop".\n\n'
'Please rename %s to %s to correct this problem.\n'+'-'*78,
'Please rename %s to %s to correct this problem.\n' + '-' * 78,
bei, self.egg_info
)
self.broken_egg_info = self.egg_info
self.egg_info = bei # make it work for now
self.egg_info = bei # make it work for now
class FileList(_FileList):
"""File list that accepts only existing, platform-independent paths"""
def append(self, item):
if item.endswith('\r'): # Fix older sdists built on Windows
if item.endswith('\r'): # Fix older sdists built on Windows
item = item[:-1]
path = convert_path(item)
if sys.version_info >= (3,):
try:
if os.path.exists(path) or os.path.exists(path.encode('utf-8')):
self.files.append(path)
except UnicodeEncodeError:
# Accept UTF-8 filenames even if LANG=C
if os.path.exists(path.encode('utf-8')):
self.files.append(path)
else:
log.warn("'%s' not %s encodable -- skipping", path,
sys.getfilesystemencoding())
else:
if os.path.exists(path):
self.files.append(path)
if self._safe_path(path):
self.files.append(path)
def extend(self, paths):
self.files.extend(filter(self._safe_path, paths))
def _repair(self):
"""
Replace self.files with only safe paths
Because some owners of FileList manipulate the underlying
``files`` attribute directly, this method must be called to
repair those paths.
"""
self.files = list(filter(self._safe_path, self.files))
def _safe_path(self, path):
enc_warn = "'%s' not %s encodable -- skipping"
# To avoid accidental trans-coding errors, decode to unicode first
u_path = unicode_utils.filesys_decode(path)
if u_path is None:
log.warn("'%s' in unexpected encoding -- skipping" % path)
return False
# Must ensure utf-8 encodability
utf8_path = unicode_utils.try_encode(u_path, "utf-8")
if utf8_path is None:
log.warn(enc_warn, path, 'utf-8')
return False
try:
# accept if either way checks out
if os.path.exists(u_path) or os.path.exists(utf8_path):
return True
# this will catch any encode errors decoding u_path
except UnicodeEncodeError:
log.warn(enc_warn, path, sys.getfilesystemencoding())
class manifest_maker(sdist):
template = "MANIFEST.in"
def initialize_options(self):
@ -241,7 +285,7 @@ class manifest_maker(sdist):
def run(self):
self.filelist = FileList()
if not os.path.exists(self.manifest):
self.write_manifest() # it must exist so it'll get in the list
self.write_manifest() # it must exist so it'll get in the list
self.filelist.findall()
self.add_defaults()
if os.path.exists(self.template):
@ -251,30 +295,23 @@ class manifest_maker(sdist):
self.filelist.remove_duplicates()
self.write_manifest()
def _manifest_normalize(self, path):
path = unicode_utils.filesys_decode(path)
return path.replace(os.sep, '/')
def write_manifest(self):
"""Write the file list in 'self.filelist' (presumably as filled in
by 'add_defaults()' and 'read_template()') to the manifest file
"""
Write the file list in 'self.filelist' to the manifest file
named by 'self.manifest'.
"""
# The manifest must be UTF-8 encodable. See #303.
if sys.version_info >= (3,):
files = []
for file in self.filelist.files:
try:
file.encode("utf-8")
except UnicodeEncodeError:
log.warn("'%s' not UTF-8 encodable -- skipping" % file)
else:
files.append(file)
self.filelist.files = files
self.filelist._repair()
files = self.filelist.files
if os.sep!='/':
files = [f.replace(os.sep,'/') for f in files]
self.execute(write_file, (self.manifest, files),
"writing manifest file '%s'" % self.manifest)
# _repair ensures utf-8 encodability; paths still need decoding/normalizing
files = [self._manifest_normalize(f) for f in self.filelist.files]
msg = "writing manifest file '%s'" % self.manifest
self.execute(write_file, (self.manifest, files), msg)
def warn(self, msg): # suppress missing-file warnings from sdist
def warn(self, msg): # suppress missing-file warnings from sdist
if not msg.startswith("standard file not found:"):
sdist.warn(self, msg)
@ -288,15 +325,41 @@ class manifest_maker(sdist):
elif os.path.exists(self.manifest):
self.read_manifest()
ei_cmd = self.get_finalized_command('egg_info')
self._add_egg_info(cmd=ei_cmd)
self.filelist.include_pattern("*", prefix=ei_cmd.egg_info)
def _add_egg_info(self, cmd):
"""
Add paths for egg-info files for an external egg-base.
The egg-info files are written to egg-base. If egg-base is
outside the current working directory, this method
searches the egg-base directory for files to include
in the manifest. Uses distutils.filelist.findall (which is
really the version monkeypatched in by setuptools/__init__.py)
to perform the search.
Since findall records relative paths, prefix the returned
paths with cmd.egg_base, so add_default's include_pattern call
(which is looking for the absolute cmd.egg_info) will match
them.
"""
if cmd.egg_base == os.curdir:
# egg-info files were already added by something else
return
discovered = distutils.filelist.findall(cmd.egg_base)
resolved = (os.path.join(cmd.egg_base, path) for path in discovered)
self.filelist.allfiles.extend(resolved)
def prune_file_list(self):
build = self.get_finalized_command('build')
base_dir = self.distribution.get_fullname()
self.filelist.exclude_pattern(None, prefix=build.build_base)
self.filelist.exclude_pattern(None, prefix=base_dir)
sep = re.escape(os.sep)
self.filelist.exclude_pattern(sep+r'(RCS|CVS|\.svn)'+sep, is_regex=1)
self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep,
is_regex=1)
def write_file(filename, contents):
@ -304,11 +367,13 @@ def write_file(filename, contents):
sequence of strings without line terminators) to it.
"""
contents = "\n".join(contents)
if sys.version_info >= (3,):
contents = contents.encode("utf-8")
f = open(filename, "wb") # always write POSIX-style manifest
f.write(contents)
f.close()
# assuming the contents has been vetted for utf-8 encoding
contents = contents.encode("utf-8")
with open(filename, "wb") as f: # always write POSIX-style manifest
f.write(contents)
def write_pkg_info(cmd, basename, filename):
log.info("writing %s", filename)
@ -323,10 +388,12 @@ def write_pkg_info(cmd, basename, filename):
finally:
metadata.name, metadata.version = oldname, oldver
safe = getattr(cmd.distribution,'zip_safe',None)
safe = getattr(cmd.distribution, 'zip_safe', None)
from setuptools.command import bdist_egg
bdist_egg.write_safety_flag(cmd.egg_info, safe)
def warn_depends_obsolete(cmd, basename, filename):
if os.path.exists(filename):
log.warn(
@ -335,55 +402,75 @@ def warn_depends_obsolete(cmd, basename, filename):
)
def _write_requirements(stream, reqs):
lines = yield_lines(reqs or ())
append_cr = lambda line: line + '\n'
lines = map(append_cr, lines)
stream.writelines(lines)
def write_requirements(cmd, basename, filename):
dist = cmd.distribution
data = ['\n'.join(yield_lines(dist.install_requires or ()))]
for extra,reqs in (dist.extras_require or {}).items():
data.append('\n\n[%s]\n%s' % (extra, '\n'.join(yield_lines(reqs))))
cmd.write_or_delete_file("requirements", filename, ''.join(data))
data = StringIO()
_write_requirements(data, dist.install_requires)
extras_require = dist.extras_require or {}
for extra in sorted(extras_require):
data.write('\n[{extra}]\n'.format(**vars()))
_write_requirements(data, extras_require[extra])
cmd.write_or_delete_file("requirements", filename, data.getvalue())
def write_setup_requirements(cmd, basename, filename):
data = StringIO()
_write_requirements(data, cmd.distribution.setup_requires)
cmd.write_or_delete_file("setup-requirements", filename, data.getvalue())
def write_toplevel_names(cmd, basename, filename):
pkgs = dict.fromkeys(
[
k.split('.',1)[0]
k.split('.', 1)[0]
for k in cmd.distribution.iter_distribution_names()
]
)
cmd.write_file("top-level names", filename, '\n'.join(pkgs)+'\n')
cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n')
def overwrite_arg(cmd, basename, filename):
write_arg(cmd, basename, filename, True)
def write_arg(cmd, basename, filename, force=False):
argname = os.path.splitext(basename)[0]
value = getattr(cmd.distribution, argname, None)
if value is not None:
value = '\n'.join(value)+'\n'
value = '\n'.join(value) + '\n'
cmd.write_or_delete_file(argname, filename, value, force)
def write_entries(cmd, basename, filename):
ep = cmd.distribution.entry_points
if isinstance(ep,basestring) or ep is None:
if isinstance(ep, basestring) or ep is None:
data = ep
elif ep is not None:
data = []
for section, contents in ep.items():
if not isinstance(contents,basestring):
for section, contents in sorted(ep.items()):
if not isinstance(contents, basestring):
contents = EntryPoint.parse_group(section, contents)
contents = '\n'.join(map(str,contents.values()))
data.append('[%s]\n%s\n\n' % (section,contents))
contents = '\n'.join(sorted(map(str, contents.values())))
data.append('[%s]\n%s\n\n' % (section, contents))
data = ''.join(data)
cmd.write_or_delete_file('entry points', filename, data, True)
def get_pkg_info_revision():
# See if we can get a -r### off of PKG-INFO, in case this is an sdist of
# a subversion revision
#
if os.path.exists('PKG-INFO'):
f = open('PKG-INFO','rU')
f = open('PKG-INFO', 'rU')
for line in f:
match = re.match(r"Version:.*-r(\d+)\s*$", line)
if match:

View File

@ -1,18 +1,26 @@
import setuptools
import sys
import glob
from distutils.command.install import install as _install
from distutils.errors import DistutilsArgError
import inspect
import glob
import warnings
import platform
import distutils.command.install as orig
class install(_install):
import setuptools
# Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for
# now. See https://bitbucket.org/pypa/setuptools/issue/199/
_install = orig.install
class install(orig.install):
"""Use easy_install to install the package, w/dependencies"""
user_options = _install.user_options + [
user_options = orig.install.user_options + [
('old-and-unmanageable', None, "Try not to use this!"),
('single-version-externally-managed', None,
"used by system package builders to create 'flat' eggs"),
"used by system package builders to create 'flat' eggs"),
]
boolean_options = _install.boolean_options + [
boolean_options = orig.install.boolean_options + [
'old-and-unmanageable', 'single-version-externally-managed',
]
new_commands = [
@ -22,13 +30,12 @@ class install(_install):
_nc = dict(new_commands)
def initialize_options(self):
_install.initialize_options(self)
orig.install.initialize_options(self)
self.old_and_unmanageable = None
self.single_version_externally_managed = None
self.no_compile = None # make DISTUTILS_DEBUG work right!
def finalize_options(self):
_install.finalize_options(self)
orig.install.finalize_options(self)
if self.root:
self.single_version_externally_managed = True
elif self.single_version_externally_managed:
@ -41,7 +48,7 @@ class install(_install):
def handle_extra_path(self):
if self.root or self.single_version_externally_managed:
# explicit backward-compatibility mode, allow extra_path to work
return _install.handle_extra_path(self)
return orig.install.handle_extra_path(self)
# Ignore extra_path when installing an egg (or being run by another
# command without --root or --single-version-externally-managed
@ -51,28 +58,41 @@ class install(_install):
def run(self):
# Explicit request for old-style install? Just do it
if self.old_and_unmanageable or self.single_version_externally_managed:
return _install.run(self)
return orig.install.run(self)
# Attempt to detect whether we were called from setup() or by another
# command. If we were called by setup(), our caller will be the
# 'run_command' method in 'distutils.dist', and *its* caller will be
# the 'run_commands' method. If we were called any other way, our
# immediate caller *might* be 'run_command', but it won't have been
# called by 'run_commands'. This is slightly kludgy, but seems to
# work.
#
caller = sys._getframe(2)
caller_module = caller.f_globals.get('__name__','')
caller_name = caller.f_code.co_name
if caller_module != 'distutils.dist' or caller_name!='run_commands':
# We weren't called from the command line or setup(), so we
# should run in backward-compatibility mode to support bdist_*
# commands.
_install.run(self)
if not self._called_from_setup(inspect.currentframe()):
# Run in backward-compatibility mode to support bdist_* commands.
orig.install.run(self)
else:
self.do_egg_install()
@staticmethod
def _called_from_setup(run_frame):
"""
Attempt to detect whether run() was called from setup() or by another
command. If called by setup(), the parent caller will be the
'run_command' method in 'distutils.dist', and *its* caller will be
the 'run_commands' method. If called any other way, the
immediate caller *might* be 'run_command', but it won't have been
called by 'run_commands'. Return True in that case or if a call stack
is unavailable. Return False otherwise.
"""
if run_frame is None:
msg = "Call stack not available. bdist_* commands may fail."
warnings.warn(msg)
if platform.python_implementation() == 'IronPython':
msg = "For best results, pass -X:Frames to enable call stack."
warnings.warn(msg)
return True
res = inspect.getouterframes(run_frame)[2]
caller, = res[:1]
info = inspect.getframeinfo(caller)
caller_module = caller.f_globals.get('__name__', '')
return (
caller_module == 'distutils.dist'
and info.function == 'run_commands'
)
def do_egg_install(self):
easy_install = self.distribution.get_command_class('easy_install')
@ -97,7 +117,9 @@ class install(_install):
cmd.run()
setuptools.bootstrap_install_from = None
# XXX Python 3.1 doesn't see _nc if this is inside the class
install.sub_commands = [
cmd for cmd in _install.sub_commands if cmd[0] not in install._nc
] + install.new_commands
install.sub_commands = (
[cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc] +
install.new_commands
)

View File

@ -1,7 +1,10 @@
from distutils import log, dir_util
import os
from setuptools import Command
from setuptools.archive_util import unpack_archive
from distutils import log, dir_util
import os, shutil, pkg_resources
import pkg_resources
class install_egg_info(Command):
"""Install an .egg-info directory for the package"""
@ -16,26 +19,26 @@ class install_egg_info(Command):
self.install_dir = None
def finalize_options(self):
self.set_undefined_options('install_lib',('install_dir','install_dir'))
self.set_undefined_options('install_lib',
('install_dir', 'install_dir'))
ei_cmd = self.get_finalized_command("egg_info")
basename = pkg_resources.Distribution(
None, None, ei_cmd.egg_name, ei_cmd.egg_version
).egg_name()+'.egg-info'
).egg_name() + '.egg-info'
self.source = ei_cmd.egg_info
self.target = os.path.join(self.install_dir, basename)
self.outputs = [self.target]
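# e.g. with a hypothetical egg_name 'example' and egg_version '1.0' on
# Python 2.7, Distribution(...).egg_name() is 'example-1.0-py2.7', so the
# metadata directory is installed as 'example-1.0-py2.7.egg-info'.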
def run(self):
self.run_command('egg_info')
target = self.target
if os.path.isdir(self.target) and not os.path.islink(self.target):
dir_util.remove_tree(self.target, dry_run=self.dry_run)
elif os.path.exists(self.target):
self.execute(os.unlink,(self.target,),"Removing "+self.target)
self.execute(os.unlink, (self.target,), "Removing " + self.target)
if not self.dry_run:
pkg_resources.ensure_directory(self.target)
self.execute(self.copytree, (),
"Copying %s to %s" % (self.source, self.target)
self.execute(
self.copytree, (), "Copying %s to %s" % (self.source, self.target)
)
self.install_namespaces()
@ -44,82 +47,70 @@ class install_egg_info(Command):
def copytree(self):
# Copy the .egg-info tree to site-packages
def skimmer(src,dst):
def skimmer(src, dst):
# filter out source-control directories; note that 'src' is always
# a '/'-separated path, regardless of platform. 'dst' is a
# platform-specific path.
for skip in '.svn/','CVS/':
if src.startswith(skip) or '/'+skip in src:
for skip in '.svn/', 'CVS/':
if src.startswith(skip) or '/' + skip in src:
return None
self.outputs.append(dst)
log.debug("Copying %s to %s", src, dst)
return dst
unpack_archive(self.source, self.target, skimmer)
def install_namespaces(self):
nsp = self._get_all_ns_packages()
if not nsp: return
filename,ext = os.path.splitext(self.target)
filename += '-nspkg.pth'; self.outputs.append(filename)
log.info("Installing %s",filename)
if not self.dry_run:
f = open(filename,'wt')
for pkg in nsp:
# ensure pkg is not a unicode string under Python 2.7
pkg = str(pkg)
pth = tuple(pkg.split('.'))
trailer = '\n'
if '.' in pkg:
trailer = (
"; m and setattr(sys.modules[%r], %r, m)\n"
% ('.'.join(pth[:-1]), pth[-1])
)
f.write(
"import sys,types,os; "
"p = os.path.join(sys._getframe(1).f_locals['sitedir'], "
"*%(pth)r); "
"ie = os.path.exists(os.path.join(p,'__init__.py')); "
"m = not ie and "
"sys.modules.setdefault(%(pkg)r,types.ModuleType(%(pkg)r)); "
"mp = (m or []) and m.__dict__.setdefault('__path__',[]); "
"(p not in mp) and mp.append(p)%(trailer)s"
% locals()
)
f.close()
if not nsp:
return
filename, ext = os.path.splitext(self.target)
filename += '-nspkg.pth'
self.outputs.append(filename)
log.info("Installing %s", filename)
lines = map(self._gen_nspkg_line, nsp)
if self.dry_run:
# always generate the lines, even in dry run
list(lines)
return
with open(filename, 'wt') as f:
f.writelines(lines)
_nspkg_tmpl = (
"import sys, types, os",
"p = os.path.join(sys._getframe(1).f_locals['sitedir'], *%(pth)r)",
"ie = os.path.exists(os.path.join(p,'__init__.py'))",
"m = not ie and "
"sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))",
"mp = (m or []) and m.__dict__.setdefault('__path__',[])",
"(p not in mp) and mp.append(p)",
)
"lines for the namespace installer"
_nspkg_tmpl_multi = (
'm and setattr(sys.modules[%(parent)r], %(child)r, m)',
)
"additional line(s) when a parent package is indicated"
@classmethod
def _gen_nspkg_line(cls, pkg):
# ensure pkg is not a unicode string under Python 2.7
pkg = str(pkg)
pth = tuple(pkg.split('.'))
tmpl_lines = cls._nspkg_tmpl
parent, sep, child = pkg.rpartition('.')
if parent:
tmpl_lines += cls._nspkg_tmpl_multi
return ';'.join(tmpl_lines) % locals() + '\n'
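# For a hypothetical namespace package 'ns.child' this emits one physical
# .pth line (wrapped here for readability) assembled from the template:
#
#   import sys, types, os; p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('ns', 'child'));
#   ie = os.path.exists(os.path.join(p,'__init__.py'));
#   m = not ie and sys.modules.setdefault('ns.child', types.ModuleType('ns.child'));
#   mp = (m or []) and m.__dict__.setdefault('__path__',[]);
#   (p not in mp) and mp.append(p); m and setattr(sys.modules['ns'], 'child', m)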
def _get_all_ns_packages(self):
nsp = {}
"""Return sorted list of all package namespaces"""
nsp = set()
for pkg in self.distribution.namespace_packages or []:
pkg = pkg.split('.')
while pkg:
nsp['.'.join(pkg)] = 1
nsp.add('.'.join(pkg))
pkg.pop()
nsp=list(nsp)
nsp.sort() # set up shorter names first
return nsp
return sorted(nsp)
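# e.g. a hypothetical namespace_packages=['foo.bar.baz'] expands to
# ['foo', 'foo.bar', 'foo.bar.baz'], so every parent namespace also gets
# its own line in the -nspkg.pth file.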

View File

@ -1,21 +1,11 @@
from distutils.command.install_lib import install_lib as _install_lib
import os
import imp
from itertools import product, starmap
import distutils.command.install_lib as orig
class install_lib(_install_lib):
class install_lib(orig.install_lib):
"""Don't add compiled flags to filenames of non-Python files"""
def _bytecode_filenames (self, py_filenames):
bytecode_files = []
for py_file in py_filenames:
if not py_file.endswith('.py'):
continue
if self.compile:
bytecode_files.append(py_file + "c")
if self.optimize > 0:
bytecode_files.append(py_file + "o")
return bytecode_files
def run(self):
self.build()
outfiles = self.install()
@ -24,30 +14,81 @@ class install_lib(_install_lib):
self.byte_compile(outfiles)
def get_exclusions(self):
exclude = {}
nsp = self.distribution.namespace_packages
"""
Return a collections.Sized collections.Container of paths to be
excluded for single_version_externally_managed installations.
"""
all_packages = (
pkg
for ns_pkg in self._get_SVEM_NSPs()
for pkg in self._all_packages(ns_pkg)
)
if (nsp and self.get_finalized_command('install')
.single_version_externally_managed
):
for pkg in nsp:
parts = pkg.split('.')
while parts:
pkgdir = os.path.join(self.install_dir, *parts)
for f in '__init__.py', '__init__.pyc', '__init__.pyo':
exclude[os.path.join(pkgdir,f)] = 1
parts.pop()
return exclude
excl_specs = product(all_packages, self._gen_exclusion_paths())
return set(starmap(self._exclude_pkg_path, excl_specs))
def _exclude_pkg_path(self, pkg, exclusion_path):
"""
Given a package name and exclusion path within that package,
compute the full exclusion path.
"""
parts = pkg.split('.') + [exclusion_path]
return os.path.join(self.install_dir, *parts)
@staticmethod
def _all_packages(pkg_name):
"""
>>> list(install_lib._all_packages('foo.bar.baz'))
['foo.bar.baz', 'foo.bar', 'foo']
"""
while pkg_name:
yield pkg_name
pkg_name, sep, child = pkg_name.rpartition('.')
def _get_SVEM_NSPs(self):
"""
Get namespace packages (list) but only for
single_version_externally_managed installations and empty otherwise.
"""
# TODO: is it necessary to short-circuit here? i.e. what's the cost
# if get_finalized_command is called even when namespace_packages is
# False?
if not self.distribution.namespace_packages:
return []
install_cmd = self.get_finalized_command('install')
svem = install_cmd.single_version_externally_managed
return self.distribution.namespace_packages if svem else []
@staticmethod
def _gen_exclusion_paths():
"""
Generate file paths to be excluded for namespace packages (bytecode
cache files).
"""
# always exclude the package module itself
yield '__init__.py'
yield '__init__.pyc'
yield '__init__.pyo'
if not hasattr(imp, 'get_tag'):
return
base = os.path.join('__pycache__', '__init__.' + imp.get_tag())
yield base + '.pyc'
yield base + '.pyo'
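# e.g. on an interpreter whose imp.get_tag() is 'cpython-34' (hypothetical),
# the excluded names per namespace package are: __init__.py, __init__.pyc,
# __init__.pyo, __pycache__/__init__.cpython-34.pyc and
# __pycache__/__init__.cpython-34.pyo.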
def copy_tree(
self, infile, outfile,
preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
self, infile, outfile,
preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
):
assert preserve_mode and preserve_times and not preserve_symlinks
exclude = self.get_exclusions()
if not exclude:
return _install_lib.copy_tree(self, infile, outfile)
return orig.install_lib.copy_tree(self, infile, outfile)
# Exclude namespace package __init__.py* files from the output
@ -58,7 +99,8 @@ class install_lib(_install_lib):
def pf(src, dst):
if dst in exclude:
log.warn("Skipping installation of %s (namespace package)",dst)
log.warn("Skipping installation of %s (namespace package)",
dst)
return False
log.info("copying %s -> %s", src, os.path.dirname(dst))
@ -69,14 +111,8 @@ class install_lib(_install_lib):
return outfiles
def get_outputs(self):
outputs = _install_lib.get_outputs(self)
outputs = orig.install_lib.get_outputs(self)
exclude = self.get_exclusions()
if exclude:
return [f for f in outputs if f not in exclude]
return outputs

View File

@ -1,23 +1,23 @@
from distutils.command.install_scripts import install_scripts \
as _install_scripts
from pkg_resources import Distribution, PathMetadata, ensure_directory
import os
from distutils import log
import distutils.command.install_scripts as orig
import os
class install_scripts(_install_scripts):
from pkg_resources import Distribution, PathMetadata, ensure_directory
class install_scripts(orig.install_scripts):
"""Do normal script install, plus any egg_info wrapper scripts"""
def initialize_options(self):
_install_scripts.initialize_options(self)
orig.install_scripts.initialize_options(self)
self.no_ep = False
def run(self):
from setuptools.command.easy_install import get_script_args
from setuptools.command.easy_install import sys_executable
import setuptools.command.easy_install as ei
self.run_command("egg_info")
if self.distribution.scripts:
_install_scripts.run(self) # run first to set up self.outfiles
orig.install_scripts.run(self) # run first to set up self.outfiles
else:
self.outfiles = []
if self.no_ep:
@ -30,16 +30,23 @@ class install_scripts(_install_scripts):
ei_cmd.egg_name, ei_cmd.egg_version,
)
bs_cmd = self.get_finalized_command('build_scripts')
executable = getattr(bs_cmd,'executable',sys_executable)
is_wininst = getattr(
self.get_finalized_command("bdist_wininst"), '_is_running', False
)
for args in get_script_args(dist, executable, is_wininst):
exec_param = getattr(bs_cmd, 'executable', None)
bw_cmd = self.get_finalized_command("bdist_wininst")
is_wininst = getattr(bw_cmd, '_is_running', False)
writer = ei.ScriptWriter
if is_wininst:
exec_param = "python.exe"
writer = ei.WindowsScriptWriter
# resolve the writer to the environment
writer = writer.best()
cmd = writer.command_spec_class.best().from_param(exec_param)
for args in writer.get_args(dist, cmd.as_header()):
self.write_script(*args)
def write_script(self, script_name, contents, mode="t", *ignored):
"""Write an executable file to the scripts directory"""
from setuptools.command.easy_install import chmod, current_umask
log.info("Installing %s script to %s", script_name, self.install_dir)
target = os.path.join(self.install_dir, script_name)
self.outfiles.append(target)
@ -47,8 +54,7 @@ class install_scripts(_install_scripts):
mask = current_umask()
if not self.dry_run:
ensure_directory(target)
f = open(target,"w"+mode)
f = open(target, "w" + mode)
f.write(contents)
f.close()
chmod(target, 0x1FF-mask) # 0777
chmod(target, 0o777 - mask)

View File

@ -1,15 +1,15 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
<assemblyIdentity version="1.0.0.0"
processorArchitecture="X86"
name="%(name)s"
type="win32"/>
<assemblyIdentity version="1.0.0.0"
processorArchitecture="X86"
name="%(name)s"
type="win32"/>
<!-- Identify the application security requirements. -->
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level="asInvoker" uiAccess="false"/>
</requestedPrivileges>
</security>
<security>
<requestedPrivileges>
<requestedExecutionLevel level="asInvoker" uiAccess="false"/>
</requestedPrivileges>
</security>
</trustInfo>
</assembly>

View File

@ -1,10 +1,10 @@
from distutils.command.register import register as _register
import distutils.command.register as orig
class register(_register):
__doc__ = _register.__doc__
class register(orig.register):
__doc__ = orig.register.__doc__
def run(self):
# Make sure that we are using valid current name/version info
self.run_command('egg_info')
_register.run(self)
orig.register.run(self)

View File

@ -1,18 +1,20 @@
import distutils, os
from setuptools import Command
from setuptools.compat import basestring
from distutils.util import convert_path
from distutils import log
from distutils.errors import *
from distutils.errors import DistutilsOptionError
import os
from setuptools import Command
from setuptools.compat import basestring
class rotate(Command):
"""Delete older distributions"""
description = "delete older distributions, keeping N newest files"
user_options = [
('match=', 'm', "patterns to match (required)"),
('match=', 'm', "patterns to match (required)"),
('dist-dir=', 'd', "directory where the distributions are"),
('keep=', 'k', "number of matching distributions to keep"),
('keep=', 'k', "number of matching distributions to keep"),
]
boolean_options = []
@ -29,7 +31,7 @@ class rotate(Command):
"(e.g. '.zip' or '.egg')"
)
if self.keep is None:
raise DistutilsOptionError("Must specify number of files to keep")
raise DistutilsOptionError("Must specify number of files to keep")
try:
self.keep = int(self.keep)
except ValueError:
@ -38,46 +40,22 @@ class rotate(Command):
self.match = [
convert_path(p.strip()) for p in self.match.split(',')
]
self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
def run(self):
self.run_command("egg_info")
from glob import glob
for pattern in self.match:
pattern = self.distribution.get_name()+'*'+pattern
files = glob(os.path.join(self.dist_dir,pattern))
files = [(os.path.getmtime(f),f) for f in files]
pattern = self.distribution.get_name() + '*' + pattern
files = glob(os.path.join(self.dist_dir, pattern))
files = [(os.path.getmtime(f), f) for f in files]
files.sort()
files.reverse()
log.info("%d file(s) matching %s", len(files), pattern)
files = files[self.keep:]
for (t,f) in files:
for (t, f) in files:
log.info("Deleting %s", f)
if not self.dry_run:
os.unlink(f)
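# Typical invocation with hypothetical values -- keep only the two newest
# .egg files in dist/ and delete the rest:
#
#   python setup.py rotate --match=.egg --keep=2 --dist-dir=dist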

View File

@ -1,7 +1,6 @@
import distutils, os
from setuptools import Command
from setuptools.command.setopt import edit_config, option_base
class saveopts(option_base):
"""Save command-line options to a file"""
@ -13,12 +12,11 @@ class saveopts(option_base):
for cmd in dist.command_options:
if cmd=='saveopts':
continue # don't save our own options!
if cmd == 'saveopts':
continue # don't save our own options!
for opt,(src,val) in dist.get_option_dict(cmd).items():
if src=="command line":
settings.setdefault(cmd,{})[opt] = val
for opt, (src, val) in dist.get_option_dict(cmd).items():
if src == "command line":
settings.setdefault(cmd, {})[opt] = val
edit_config(self.filename, settings, self.dry_run)

View File

@ -1,16 +1,17 @@
import os
import re
import sys
from glob import glob
from distutils import log
import distutils.command.sdist as orig
import os
import sys
from setuptools.compat import PY3
from setuptools.utils import cs_path_exists
import pkg_resources
from distutils.command.sdist import sdist as _sdist
from distutils.util import convert_path
from distutils import log
from setuptools import svn_utils
READMES = ('README', 'README.rst', 'README.txt')
READMES = 'README', 'README.rst', 'README.txt'
_default_revctrl = list
def walk_revctrl(dirname=''):
"""Find all files under revision control"""
@ -19,60 +20,7 @@ def walk_revctrl(dirname=''):
yield item
#TODO will need test case
class re_finder(object):
"""
Finder that locates files based on entries in a file matched by a
regular expression.
"""
def __init__(self, path, pattern, postproc=lambda x: x):
self.pattern = pattern
self.postproc = postproc
self.entries_path = convert_path(path)
def _finder(self, dirname, filename):
f = open(filename,'rU')
try:
data = f.read()
finally:
f.close()
for match in self.pattern.finditer(data):
path = match.group(1)
# postproc was formerly used when the svn finder
# was an re_finder for calling unescape
path = self.postproc(path)
yield svn_utils.joinpath(dirname, path)
def find(self, dirname=''):
path = svn_utils.joinpath(dirname, self.entries_path)
if not os.path.isfile(path):
# entries file doesn't exist
return
for path in self._finder(dirname,path):
if os.path.isfile(path):
yield path
elif os.path.isdir(path):
for item in self.find(path):
yield item
__call__ = find
def _default_revctrl(dirname=''):
'Primary svn_cvs entry point'
for finder in finders:
for item in finder(dirname):
yield item
finders = [
re_finder('CVS/Entries', re.compile(r"^\w?/([^/]+)/", re.M)),
svn_utils.svn_finder,
]
class sdist(_sdist):
class sdist(orig.sdist):
"""Smart sdist that finds anything supported by revision control"""
user_options = [
@ -84,7 +32,7 @@ class sdist(_sdist):
('dist-dir=', 'd',
"directory to put the source distribution archive(s) in "
"[default: dist]"),
]
]
negative_opt = {}
@ -92,7 +40,7 @@ class sdist(_sdist):
self.run_command('egg_info')
ei_cmd = self.get_finalized_command('egg_info')
self.filelist = ei_cmd.filelist
self.filelist.append(os.path.join(ei_cmd.egg_info,'SOURCES.txt'))
self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt'))
self.check_readme()
# Run sub commands
@ -102,12 +50,13 @@ class sdist(_sdist):
# Call check_metadata only if no 'check' command
# (distutils <= 2.6)
import distutils.command
if 'check' not in distutils.command.__all__:
self.check_metadata()
self.make_distribution()
dist_files = getattr(self.distribution,'dist_files',[])
dist_files = getattr(self.distribution, 'dist_files', [])
for file in self.archive_files:
data = ('sdist', '', file)
if data not in dist_files:
@ -119,17 +68,19 @@ class sdist(_sdist):
# Doing so prevents an error when easy_install attempts to delete the
# file.
try:
_sdist.read_template(self)
orig.sdist.read_template(self)
except:
sys.exc_info()[2].tb_next.tb_frame.f_locals['template'].close()
_, _, tb = sys.exc_info()
tb.tb_next.tb_frame.f_locals['template'].close()
raise
# Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle
# has been fixed, so only override the method if we're using an earlier
# Python.
has_leaky_handle = (
sys.version_info < (2,7,2)
or (3,0) <= sys.version_info < (3,1,4)
or (3,2) <= sys.version_info < (3,2,1)
sys.version_info < (2, 7, 2)
or (3, 0) <= sys.version_info < (3, 1, 4)
or (3, 2) <= sys.version_info < (3, 2, 1)
)
if has_leaky_handle:
read_template = __read_template_hack
@ -142,7 +93,7 @@ class sdist(_sdist):
alts = fn
got_it = 0
for fn in alts:
if os.path.exists(fn):
if cs_path_exists(fn):
got_it = 1
self.filelist.append(fn)
break
@ -151,14 +102,14 @@ class sdist(_sdist):
self.warn("standard file not found: should have one of " +
', '.join(alts))
else:
if os.path.exists(fn):
if cs_path_exists(fn):
self.filelist.append(fn)
else:
self.warn("standard file '%s' not found" % fn)
optional = ['test/test*.py', 'setup.cfg']
for pattern in optional:
files = list(filter(os.path.isfile, glob(pattern)))
files = list(filter(cs_path_exists, glob(pattern)))
if files:
self.filelist.extend(files)
@ -193,15 +144,16 @@ class sdist(_sdist):
return
else:
self.warn(
"standard file not found: should have one of " +', '.join(READMES)
"standard file not found: should have one of " +
', '.join(READMES)
)
def make_release_tree(self, base_dir, files):
_sdist.make_release_tree(self, base_dir, files)
orig.sdist.make_release_tree(self, base_dir, files)
# Save any egg_info command line options used to create this sdist
dest = os.path.join(base_dir, 'setup.cfg')
if hasattr(os,'link') and os.path.exists(dest):
if hasattr(os, 'link') and os.path.exists(dest):
# unlink and re-copy, since it might be hard-linked, and
# we don't want to change the source version
os.unlink(dest)
@ -219,7 +171,8 @@ class sdist(_sdist):
first_line = fp.readline()
finally:
fp.close()
return first_line != '# file GENERATED by distutils, do NOT edit\n'.encode()
return (first_line !=
'# file GENERATED by distutils, do NOT edit\n'.encode())
def read_manifest(self):
"""Read the manifest file (named by 'self.manifest') and use it to
@ -230,7 +183,7 @@ class sdist(_sdist):
manifest = open(self.manifest, 'rbU')
for line in manifest:
# The manifest must contain UTF-8. See #303.
if sys.version_info >= (3,):
if PY3:
try:
line = line.decode('UTF-8')
except UnicodeDecodeError:

View File

@ -1,8 +1,11 @@
import distutils, os
from setuptools import Command
from distutils.util import convert_path
from distutils import log
from distutils.errors import *
from distutils.errors import DistutilsOptionError
import distutils
import os
from setuptools import Command
__all__ = ['config_file', 'edit_config', 'option_base', 'setopt']
@ -12,33 +15,20 @@ def config_file(kind="local"):
`kind` must be one of "local", "global", or "user"
"""
if kind=='local':
if kind == 'local':
return 'setup.cfg'
if kind=='global':
if kind == 'global':
return os.path.join(
os.path.dirname(distutils.__file__),'distutils.cfg'
os.path.dirname(distutils.__file__), 'distutils.cfg'
)
if kind=='user':
dot = os.name=='posix' and '.' or ''
if kind == 'user':
dot = os.name == 'posix' and '.' or ''
return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
raise ValueError(
"config_file() type must be 'local', 'global', or 'user'", kind
)
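# e.g. config_file('local') -> 'setup.cfg'; config_file('global') ->
# distutils.cfg next to the distutils package; config_file('user') ->
# '~/.pydistutils.cfg' on POSIX ('~/pydistutils.cfg' elsewhere), with the
# home directory expanded.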
def edit_config(filename, settings, dry_run=False):
"""Edit a configuration file to include `settings`
@ -48,6 +38,7 @@ def edit_config(filename, settings, dry_run=False):
A setting of ``None`` means to delete that setting.
"""
from setuptools.compat import ConfigParser
log.debug("Reading configuration from %s", filename)
opts = ConfigParser.RawConfigParser()
opts.read([filename])
@ -59,46 +50,49 @@ def edit_config(filename, settings, dry_run=False):
if not opts.has_section(section):
log.debug("Adding new section [%s] to %s", section, filename)
opts.add_section(section)
for option,value in options.items():
for option, value in options.items():
if value is None:
log.debug("Deleting %s.%s from %s",
log.debug(
"Deleting %s.%s from %s",
section, option, filename
)
opts.remove_option(section,option)
opts.remove_option(section, option)
if not opts.options(section):
log.info("Deleting empty [%s] section from %s",
section, filename)
section, filename)
opts.remove_section(section)
else:
log.debug(
"Setting %s.%s to %r in %s",
section, option, value, filename
)
opts.set(section,option,value)
opts.set(section, option, value)
log.info("Writing %s", filename)
if not dry_run:
f = open(filename,'w'); opts.write(f); f.close()
with open(filename, 'w') as f:
opts.write(f)
class option_base(Command):
"""Abstract base class for commands that mess with config files"""
user_options = [
('global-config', 'g',
"save options to the site-wide distutils.cfg file"),
"save options to the site-wide distutils.cfg file"),
('user-config', 'u',
"save options to the current user's pydistutils.cfg file"),
"save options to the current user's pydistutils.cfg file"),
('filename=', 'f',
"configuration file to use (default=setup.cfg)"),
"configuration file to use (default=setup.cfg)"),
]
boolean_options = [
'global-config', 'user-config',
]
]
def initialize_options(self):
self.global_config = None
self.user_config = None
self.user_config = None
self.filename = None
def finalize_options(self):
@ -111,14 +105,12 @@ class option_base(Command):
filenames.append(self.filename)
if not filenames:
filenames.append(config_file('local'))
if len(filenames)>1:
if len(filenames) > 1:
raise DistutilsOptionError(
"Must specify only one configuration file option",
filenames
)
self.filename, = filenames
self.filename, = filenames
class setopt(option_base):
@ -128,9 +120,9 @@ class setopt(option_base):
user_options = [
('command=', 'c', 'command to set an option for'),
('option=', 'o', 'option to set'),
('set-value=', 's', 'value of the option'),
('remove', 'r', 'remove (unset) the value'),
('option=', 'o', 'option to set'),
('set-value=', 's', 'value of the option'),
('remove', 'r', 'remove (unset) the value'),
] + option_base.user_options
boolean_options = option_base.boolean_options + ['remove']
@ -152,13 +144,7 @@ class setopt(option_base):
def run(self):
edit_config(
self.filename, {
self.command: {self.option.replace('-','_'):self.set_value}
self.command: {self.option.replace('-', '_'): self.set_value}
},
self.dry_run
)
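# Example with hypothetical values -- persist a bdist option into setup.cfg:
#
#   python setup.py setopt --command=bdist --option=dist-dir --set-value=out
#
# which edit_config() records as:
#
#   [bdist]
#   dist_dir = out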

View File

@ -1,12 +1,17 @@
from setuptools import Command
from distutils.errors import DistutilsOptionError
from unittest import TestLoader
import unittest
import sys
from pkg_resources import *
from pkg_resources import _namespace_packages
from unittest import TestLoader, main
from pkg_resources import (resource_listdir, resource_exists, normalize_path,
working_set, _namespace_packages,
add_activation_listener, require, EntryPoint)
from setuptools import Command
from setuptools.compat import PY3
from setuptools.py31compat import unittest_main
class ScanningLoader(TestLoader):
def loadTestsFromModule(self, module):
"""Return a suite of all tests cases contained in the given module
@ -15,48 +20,45 @@ class ScanningLoader(TestLoader):
the return value to the tests.
"""
tests = []
if module.__name__!='setuptools.tests.doctest': # ugh
tests.append(TestLoader.loadTestsFromModule(self,module))
tests.append(TestLoader.loadTestsFromModule(self, module))
if hasattr(module, "additional_tests"):
tests.append(module.additional_tests())
if hasattr(module, '__path__'):
for file in resource_listdir(module.__name__, ''):
if file.endswith('.py') and file!='__init__.py':
submodule = module.__name__+'.'+file[:-3]
if file.endswith('.py') and file != '__init__.py':
submodule = module.__name__ + '.' + file[:-3]
else:
if resource_exists(
module.__name__, file+'/__init__.py'
):
submodule = module.__name__+'.'+file
if resource_exists(module.__name__, file + '/__init__.py'):
submodule = module.__name__ + '.' + file
else:
continue
tests.append(self.loadTestsFromName(submodule))
if len(tests)!=1:
if len(tests) != 1:
return self.suiteClass(tests)
else:
return tests[0] # don't create a nested suite for only one return
return tests[0] # don't create a nested suite for only one return
class test(Command):
"""Command to run unit tests after in-place build"""
description = "run unit tests after in-place build"
user_options = [
('test-module=','m', "Run 'test_suite' in specified module"),
('test-suite=','s',
"Test suite to run (e.g. 'some_module.test_suite')"),
('test-module=', 'm', "Run 'test_suite' in specified module"),
('test-suite=', 's',
"Test suite to run (e.g. 'some_module.test_suite')"),
('test-runner=', 'r', "Test runner to use"),
]
def initialize_options(self):
self.test_suite = None
self.test_module = None
self.test_loader = None
self.test_runner = None
def finalize_options(self):
@ -64,7 +66,7 @@ class test(Command):
if self.test_module is None:
self.test_suite = self.distribution.test_suite
else:
self.test_suite = self.test_module+".test_suite"
self.test_suite = self.test_module + ".test_suite"
elif self.test_module:
raise DistutilsOptionError(
"You may specify a module or a suite, but not both"
@ -73,16 +75,18 @@ class test(Command):
self.test_args = [self.test_suite]
if self.verbose:
self.test_args.insert(0,'--verbose')
self.test_args.insert(0, '--verbose')
if self.test_loader is None:
self.test_loader = getattr(self.distribution,'test_loader',None)
self.test_loader = getattr(self.distribution, 'test_loader', None)
if self.test_loader is None:
self.test_loader = "setuptools.command.test:ScanningLoader"
if self.test_runner is None:
self.test_runner = getattr(self.distribution, 'test_runner', None)
def with_project_on_sys_path(self, func):
if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False):
with_2to3 = PY3 and getattr(self.distribution, 'use_2to3', False)
if with_2to3:
# If we run 2to3 we can not do this inplace:
# Ensure metadata is up-to-date
@ -122,10 +126,10 @@ class test(Command):
sys.modules.update(old_modules)
working_set.__init__()
def run(self):
if self.distribution.install_requires:
self.distribution.fetch_build_eggs(self.distribution.install_requires)
self.distribution.fetch_build_eggs(
self.distribution.install_requires)
if self.distribution.tests_require:
self.distribution.fetch_build_eggs(self.distribution.tests_require)
@ -137,14 +141,11 @@ class test(Command):
self.announce('running "unittest %s"' % cmd)
self.with_project_on_sys_path(self.run_tests)
def run_tests(self):
import unittest
# Purge modules under test from sys.modules. The test loader will
# re-import them from the build location. Required when 2to3 is used
# with namespace packages.
if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False):
if PY3 and getattr(self.distribution, 'use_2to3', False):
module = self.test_args[-1].split('.')[0]
if module in _namespace_packages:
del_modules = []
@ -156,43 +157,19 @@ class test(Command):
del_modules.append(name)
list(map(sys.modules.__delitem__, del_modules))
loader_ep = EntryPoint.parse("x="+self.test_loader)
loader_class = loader_ep.load(require=False)
cks = loader_class()
unittest.main(
None, None, [unittest.__file__]+self.test_args,
testLoader = cks
unittest_main(
None, None, [unittest.__file__] + self.test_args,
testLoader=self._resolve_as_ep(self.test_loader),
testRunner=self._resolve_as_ep(self.test_runner),
)
@staticmethod
def _resolve_as_ep(val):
"""
Load the indicated attribute value, called, as if it were
specified as an entry point.
"""
if val is None:
return
parsed = EntryPoint.parse("x=" + val)
return parsed.resolve()()
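A small sketch of the entry-point resolution used above, assuming a pkg_resources new enough to provide EntryPoint.resolve(); the runner spec is illustrative.

from pkg_resources import EntryPoint

spec = 'unittest:TextTestRunner'     # e.g. a --test-runner value
ep = EntryPoint.parse('x=' + spec)   # wrap the spec in a throwaway entry point
runner = ep.resolve()()              # import the attribute, then call it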

View File

@ -5,6 +5,10 @@ Implements a Distutils 'upload_docs' subcommand (upload documentation to
PyPI's pythonhosted.org).
"""
from base64 import standard_b64encode
from distutils import log
from distutils.errors import DistutilsOptionError
from distutils.command.upload import upload
import os
import socket
import zipfile
@ -12,14 +16,9 @@ import tempfile
import sys
import shutil
from base64 import standard_b64encode
from setuptools.compat import httplib, urlparse, unicode, iteritems, PY3
from pkg_resources import iter_entry_points
from distutils import log
from distutils.errors import DistutilsOptionError
from distutils.command.upload import upload
from setuptools.compat import httplib, urlparse, unicode, iteritems, PY3
errors = 'surrogateescape' if PY3 else 'strict'
@ -33,7 +32,6 @@ def b(s, encoding='utf-8'):
class upload_docs(upload):
description = 'Upload documentation to PyPI'
user_options = [
@ -42,7 +40,7 @@ class upload_docs(upload):
('show-response', None,
'display full response text from server'),
('upload-dir=', None, 'directory to upload'),
]
boolean_options = upload.boolean_options
def has_sphinx(self):
@ -159,7 +157,7 @@ class upload_docs(upload):
elif schema == 'https':
conn = httplib.HTTPSConnection(netloc)
else:
raise AssertionError("unsupported schema "+schema)
raise AssertionError("unsupported schema " + schema)
data = ''
try:
@ -171,8 +169,7 @@ class upload_docs(upload):
conn.putheader('Authorization', auth)
conn.endheaders()
conn.send(body)
except socket.error:
e = sys.exc_info()[1]
except socket.error as e:
self.announce(str(e), log.ERROR)
return
@ -190,4 +187,4 @@ class upload_docs(upload):
self.announce('Upload failed (%s): %s' % (r.status, r.reason),
log.ERROR)
if self.show_response:
print('-'*75, r.read(), '-'*75)
print('-' * 75, r.read(), '-' * 75)

View File

@ -1,15 +1,15 @@
import sys
import itertools
if sys.version_info[0] < 3:
PY3 = False
PY3 = sys.version_info >= (3,)
PY2 = not PY3
if PY2:
basestring = basestring
import __builtin__ as builtins
import ConfigParser
from StringIO import StringIO
BytesIO = StringIO
execfile = execfile
func_code = lambda o: o.func_code
func_globals = lambda o: o.func_globals
im_func = lambda o: o.im_func
@ -21,8 +21,6 @@ if sys.version_info[0] < 3:
iteritems = lambda o: o.iteritems()
long_type = long
maxsize = sys.maxint
next = lambda o: o.next()
numeric_types = (int, long, float)
unichr = unichr
unicode = unicode
bytes = str
@ -34,9 +32,8 @@ if sys.version_info[0] < 3:
exec("""def reraise(tp, value, tb=None):
raise tp, value, tb""")
else:
PY3 = True
if PY3:
basestring = str
import builtins
import configparser as ConfigParser
@ -51,8 +48,6 @@ else:
iteritems = lambda o: o.items()
long_type = int
maxsize = sys.maxsize
next = next
numeric_types = (int, float)
unichr = chr
unicode = str
bytes = bytes
@ -65,18 +60,6 @@ else:
)
filterfalse = itertools.filterfalse
def execfile(fn, globs=None, locs=None):
if globs is None:
globs = globals()
if locs is None:
locs = globs
f = open(fn, 'rb')
try:
source = f.read()
finally:
f.close()
exec(compile(source, fn, 'exec'), globs, locs)
def reraise(tp, value, tb=None):
if value.__traceback__ is not tb:
raise value.with_traceback(tb)

View File

@ -1,7 +1,9 @@
from __future__ import generators
import sys, imp, marshal
import sys
import imp
import marshal
from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN
from distutils.version import StrictVersion, LooseVersion
from distutils.version import StrictVersion
from setuptools import compat
__all__ = [
'Require', 'find_module', 'get_module_constant', 'extract_constant'
@ -10,9 +12,8 @@ __all__ = [
class Require:
"""A prerequisite to building or installing a distribution"""
def __init__(self,name,requested_version,module,homepage='',
attribute=None,format=None
):
def __init__(self, name, requested_version, module, homepage='',
attribute=None, format=None):
if format is None and requested_version is not None:
format = StrictVersion
@ -25,20 +26,17 @@ class Require:
self.__dict__.update(locals())
del self.self
def full_name(self):
"""Return full package/distribution name, w/version"""
if self.requested_version is not None:
return '%s-%s' % (self.name,self.requested_version)
return self.name
def version_ok(self,version):
def version_ok(self, version):
"""Is 'version' sufficiently up-to-date?"""
return self.attribute is None or self.format is None or \
str(version) != "unknown" and version >= self.requested_version
def get_version(self, paths=None, default="unknown"):
"""Get version number of installed module, 'None', or 'default'
@ -59,20 +57,18 @@ class Require:
except ImportError:
return None
v = get_module_constant(self.module,self.attribute,default,paths)
v = get_module_constant(self.module, self.attribute, default, paths)
if v is not None and v is not default and self.format is not None:
return self.format(v)
return v
def is_present(self,paths=None):
def is_present(self, paths=None):
"""Return true if dependency is present on 'paths'"""
return self.get_version(paths) is not None
def is_current(self,paths=None):
def is_current(self, paths=None):
"""Return true if dependency is present and up-to-date on 'paths'"""
version = self.get_version(paths)
if version is None:
@ -103,7 +99,7 @@ def _iter_code(code):
ptr += 3
if op==EXTENDED_ARG:
extended_arg = arg * long_type(65536)
extended_arg = arg * compat.long_type(65536)
continue
else:
@ -113,14 +109,6 @@ def _iter_code(code):
yield op,arg
def find_module(module, paths=None):
"""Just like 'imp.find_module()', but with package support"""
@ -140,28 +128,6 @@ def find_module(module, paths=None):
return info
def get_module_constant(module, symbol, default=-1, paths=None):
"""Find 'module' by searching 'paths', and extract 'symbol'
@ -171,7 +137,7 @@ def get_module_constant(module, symbol, default=-1, paths=None):
constant. Otherwise, return 'default'."""
try:
f, path, (suffix,mode,kind) = find_module(module,paths)
f, path, (suffix, mode, kind) = find_module(module, paths)
except ImportError:
# Module doesn't exist
return None
@ -187,23 +153,17 @@ def get_module_constant(module, symbol, default=-1, paths=None):
else:
# Not something we can parse; we'll have to import it. :(
if module not in sys.modules:
imp.load_module(module,f,path,(suffix,mode,kind))
return getattr(sys.modules[module],symbol,None)
imp.load_module(module, f, path, (suffix, mode, kind))
return getattr(sys.modules[module], symbol, None)
finally:
if f:
f.close()
return extract_constant(code,symbol,default)
return extract_constant(code, symbol, default)
def extract_constant(code,symbol,default=-1):
def extract_constant(code, symbol, default=-1):
"""Extract the constant value of 'symbol' from 'code'
If the name 'symbol' is bound to a constant value by the Python code
@ -236,11 +196,20 @@ def extract_constant(code,symbol,default=-1):
return const
else:
const = default
if sys.platform.startswith('java') or sys.platform == 'cli':
# XXX it'd be better to test assertions about bytecode instead...
del extract_constant, get_module_constant
__all__.remove('extract_constant')
__all__.remove('get_module_constant')
def _update_globals():
"""
Patch the globals to remove the objects not available on some platforms.
XXX it'd be better to test assertions about bytecode instead.
"""
if not sys.platform.startswith('java') and sys.platform != 'cli':
return
incompatible = 'extract_constant', 'get_module_constant'
for name in incompatible:
del globals()[name]
__all__.remove(name)
_update_globals()

View File

@ -4,17 +4,23 @@ import re
import os
import sys
import warnings
import numbers
import distutils.log
import distutils.core
import distutils.cmd
import distutils.dist
from distutils.core import Distribution as _Distribution
from distutils.errors import (DistutilsOptionError, DistutilsPlatformError,
DistutilsSetupError)
from setuptools.depends import Require
from setuptools.compat import numeric_types, basestring
from setuptools.compat import basestring, PY2
from setuptools import windows_support
import pkg_resources
packaging = pkg_resources.packaging
def _get_unpatched(cls):
"""Protect against re-patching the distutils if reloaded
@ -31,6 +37,27 @@ def _get_unpatched(cls):
_Distribution = _get_unpatched(_Distribution)
def _patch_distribution_metadata_write_pkg_info():
"""
Workaround issue #197 - Python 3 prior to 3.2.2 uses an environment-local
encoding to save the pkg_info. Monkey-patch its write_pkg_info method to
correct this undesirable behavior.
"""
environment_local = (3,) <= sys.version_info[:3] < (3, 2, 2)
if not environment_local:
return
# from Python 3.4
def write_pkg_info(self, base_dir):
"""Write the PKG-INFO file into the release tree.
"""
with open(os.path.join(base_dir, 'PKG-INFO'), 'w',
encoding='UTF-8') as pkg_info:
self.write_pkg_file(pkg_info)
distutils.dist.DistributionMetadata.write_pkg_info = write_pkg_info
_patch_distribution_metadata_write_pkg_info()
sequence = tuple, list
def check_importable(dist, attr, value):
@ -104,8 +131,7 @@ def check_entry_points(dist, attr, value):
"""Verify that entry_points map is parseable"""
try:
pkg_resources.EntryPoint.parse_map(value)
except ValueError:
e = sys.exc_info()[1]
except ValueError as e:
raise DistutilsSetupError(e)
def check_test_suite(dist, attr, value):
@ -236,15 +262,36 @@ class Distribution(_Distribution):
self.dependency_links = attrs.pop('dependency_links', [])
assert_string_list(self,'dependency_links',self.dependency_links)
if attrs and 'setup_requires' in attrs:
self.fetch_build_eggs(attrs.pop('setup_requires'))
self.fetch_build_eggs(attrs['setup_requires'])
for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
if not hasattr(self,ep.name):
setattr(self,ep.name,None)
_Distribution.__init__(self,attrs)
if isinstance(self.metadata.version, numeric_types):
if isinstance(self.metadata.version, numbers.Number):
# Some people apparently take "version number" too literally :)
self.metadata.version = str(self.metadata.version)
if self.metadata.version is not None:
try:
ver = packaging.version.Version(self.metadata.version)
normalized_version = str(ver)
if self.metadata.version != normalized_version:
warnings.warn(
"The version specified requires normalization, "
"consider using '%s' instead of '%s'." % (
normalized_version,
self.metadata.version,
)
)
self.metadata.version = normalized_version
except (packaging.version.InvalidVersion, TypeError):
warnings.warn(
"The version specified (%r) is an invalid version, this "
"may not work as expected with newer versions of "
"setuptools, pip, and PyPI. Please see PEP 440 for more "
"details." % self.metadata.version
)
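The normalization this warning refers to can be previewed with the vendored packaging module; a hedged sketch with illustrative version strings.

import pkg_resources

packaging = pkg_resources.packaging
ver = packaging.version.Version('1.1.a1')  # accepted, but not in canonical form
print(str(ver))                            # '1.1a1', the spelling the warning suggests

try:
    packaging.version.Version('not a version')
except packaging.version.InvalidVersion:
    print('rejected per PEP 440; this is what triggers the second warning')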
def parse_command_line(self):
"""Process features after parsing command line options"""
result = _Distribution.parse_command_line(self)
@ -258,12 +305,13 @@ class Distribution(_Distribution):
def fetch_build_eggs(self, requires):
"""Resolve pre-setup requirements"""
from pkg_resources import working_set, parse_requirements
for dist in working_set.resolve(
parse_requirements(requires), installer=self.fetch_build_egg,
replace_conflicting=True
):
working_set.add(dist, replace=True)
resolved_dists = pkg_resources.working_set.resolve(
pkg_resources.parse_requirements(requires),
installer=self.fetch_build_egg,
replace_conflicting=True,
)
for dist in resolved_dists:
pkg_resources.working_set.add(dist, replace=True)
def finalize_options(self):
_Distribution.finalize_options(self)
@ -281,6 +329,21 @@ class Distribution(_Distribution):
else:
self.convert_2to3_doctests = []
def get_egg_cache_dir(self):
egg_cache_dir = os.path.join(os.curdir, '.eggs')
if not os.path.exists(egg_cache_dir):
os.mkdir(egg_cache_dir)
windows_support.hide_file(egg_cache_dir)
readme_txt_filename = os.path.join(egg_cache_dir, 'README.txt')
with open(readme_txt_filename, 'w') as f:
f.write('This directory contains eggs that were downloaded '
'by setuptools to build, test, and run plug-ins.\n\n')
f.write('This directory caches those eggs to prevent '
'repeated downloads.\n\n')
f.write('However, it is safe to delete this directory.\n\n')
return egg_cache_dir
def fetch_build_egg(self, req):
"""Fetch an egg needed for building"""
@ -304,8 +367,9 @@ class Distribution(_Distribution):
if 'find_links' in opts:
links = opts['find_links'][1].split() + links
opts['find_links'] = ('setup', links)
install_dir = self.get_egg_cache_dir()
cmd = easy_install(
dist, args=["x"], install_dir=os.curdir, exclude_scripts=True,
dist, args=["x"], install_dir=install_dir, exclude_scripts=True,
always_copy=False, build_directory=None, editable=False,
upgrade=False, multi_version=True, no_report=True, user=False
)
@ -369,7 +433,8 @@ class Distribution(_Distribution):
def print_commands(self):
for ep in pkg_resources.iter_entry_points('distutils.commands'):
if ep.name not in self.cmdclass:
cmdclass = ep.load(False) # don't require extras, we're not running
# don't require extras as the commands won't be invoked
cmdclass = ep.resolve()
self.cmdclass[ep.name] = cmdclass
return _Distribution.print_commands(self)
@ -608,7 +673,7 @@ class Distribution(_Distribution):
"""
import sys
if sys.version_info < (3,) or self.help_commands:
if PY2 or self.help_commands:
return _Distribution.handle_display_options(self, option_order)
# Stdout may be StringIO (e.g. in tests)

View File

@ -1,11 +1,17 @@
import sys
import re
import functools
import distutils.core
import distutils.errors
import distutils.extension
from setuptools.dist import _get_unpatched
from .dist import _get_unpatched
from . import msvc9_support
_Extension = _get_unpatched(distutils.core.Extension)
msvc9_support.patch_for_specialized_compiler()
def have_pyrex():
"""
Return True if Cython or Pyrex can be imported.
@ -26,16 +32,21 @@ class Extension(_Extension):
def __init__(self, *args, **kw):
_Extension.__init__(self, *args, **kw)
if not have_pyrex():
self._convert_pyx_sources_to_c()
self._convert_pyx_sources_to_lang()
def _convert_pyx_sources_to_c(self):
"convert .pyx extensions to .c"
def pyx_to_c(source):
if source.endswith('.pyx'):
source = source[:-4] + '.c'
return source
self.sources = list(map(pyx_to_c, self.sources))
def _convert_pyx_sources_to_lang(self):
"""
Replace sources with .pyx extensions to sources with the target
language extension. This mechanism allows language authors to supply
pre-converted sources but to prefer the .pyx sources.
"""
if have_pyrex():
# the build has Cython, so allow it to compile the .pyx files
return
lang = self.language or ''
target_ext = '.cpp' if lang.lower() == 'c++' else '.c'
sub = functools.partial(re.sub, '.pyx$', target_ext)
self.sources = list(map(sub, self.sources))
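As an illustration of the mapping above (module and file names are invented, and this assumes Cython/Pyrex is not importable in the build environment):

from setuptools.extension import Extension

ext = Extension('demo.fast', sources=['demo/fast.pyx'], language='c++')
print(ext.sources)   # ['demo/fast.cpp'], the .pyx source rewritten for the C++ target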
class Library(Extension):
"""Just like a regular Extension, but built as a library instead"""

View File

@ -0,0 +1,63 @@
try:
import distutils.msvc9compiler
except ImportError:
pass
unpatched = dict()
def patch_for_specialized_compiler():
"""
Patch functions in distutils.msvc9compiler to use the standalone compiler
build for Python (Windows only). Fall back to original behavior when the
standalone compiler is not available.
"""
if 'distutils' not in globals():
# The module isn't available to be patched
return
if unpatched:
# Already patched
return
unpatched.update(vars(distutils.msvc9compiler))
distutils.msvc9compiler.find_vcvarsall = find_vcvarsall
distutils.msvc9compiler.query_vcvarsall = query_vcvarsall
def find_vcvarsall(version):
Reg = distutils.msvc9compiler.Reg
VC_BASE = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f'
key = VC_BASE % ('', version)
try:
# Per-user installs register the compiler path here
productdir = Reg.get_value(key, "installdir")
except KeyError:
try:
# All-user installs on a 64-bit system register here
key = VC_BASE % ('Wow6432Node\\', version)
productdir = Reg.get_value(key, "installdir")
except KeyError:
productdir = None
if productdir:
import os
vcvarsall = os.path.join(productdir, "vcvarsall.bat")
if os.path.isfile(vcvarsall):
return vcvarsall
return unpatched['find_vcvarsall'](version)
def query_vcvarsall(version, *args, **kwargs):
try:
return unpatched['query_vcvarsall'](version, *args, **kwargs)
except distutils.errors.DistutilsPlatformError as exc:
if exc and "vcvarsall.bat" in exc.args[0]:
message = 'Microsoft Visual C++ %0.1f is required (%s).' % (version, exc.args[0])
if int(version) == 9:
# This redirection link is maintained by Microsoft.
# Contact vspython@microsoft.com if it needs updating.
raise distutils.errors.DistutilsPlatformError(
message + ' Get it from http://aka.ms/vcpython27'
)
raise distutils.errors.DistutilsPlatformError(message)
raise
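A hedged sketch of how this module is meant to be used; the patch degrades to a no-op when distutils.msvc9compiler cannot be imported and is safe to call repeatedly.

from setuptools import msvc9_support

msvc9_support.patch_for_specialized_compiler()
# distutils.msvc9compiler.find_vcvarsall now consults the VCForPython
# registry keys before falling back to the stock distutils lookup.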

View File

@ -632,16 +632,15 @@ class PackageIndex(Environment):
shutil.copy2(filename, dst)
filename=dst
file = open(os.path.join(tmpdir, 'setup.py'), 'w')
file.write(
"from setuptools import setup\n"
"setup(name=%r, version=%r, py_modules=[%r])\n"
% (
dists[0].project_name, dists[0].version,
os.path.splitext(basename)[0]
with open(os.path.join(tmpdir, 'setup.py'), 'w') as file:
file.write(
"from setuptools import setup\n"
"setup(name=%r, version=%r, py_modules=[%r])\n"
% (
dists[0].project_name, dists[0].version,
os.path.splitext(basename)[0]
)
)
)
file.close()
return filename
elif match:
@ -660,7 +659,7 @@ class PackageIndex(Environment):
def _download_to(self, url, filename):
self.info("Downloading %s", url)
# Download the file
fp, tfp, info = None, None, None
fp, info = None, None
try:
checker = HashChecker.from_url(url)
fp = self.open_url(strip_fragment(url))
@ -677,21 +676,20 @@ class PackageIndex(Environment):
sizes = get_all_headers(headers, 'Content-Length')
size = max(map(int, sizes))
self.reporthook(url, filename, blocknum, bs, size)
tfp = open(filename,'wb')
while True:
block = fp.read(bs)
if block:
checker.feed(block)
tfp.write(block)
blocknum += 1
self.reporthook(url, filename, blocknum, bs, size)
else:
break
self.check_hash(checker, filename, tfp)
with open(filename,'wb') as tfp:
while True:
block = fp.read(bs)
if block:
checker.feed(block)
tfp.write(block)
blocknum += 1
self.reporthook(url, filename, blocknum, bs, size)
else:
break
self.check_hash(checker, filename, tfp)
return headers
finally:
if fp: fp.close()
if tfp: tfp.close()
def reporthook(self, url, filename, blocknum, blksize, size):
pass # no-op
@ -701,25 +699,21 @@ class PackageIndex(Environment):
return local_open(url)
try:
return open_with_auth(url, self.opener)
except (ValueError, httplib.InvalidURL):
v = sys.exc_info()[1]
except (ValueError, httplib.InvalidURL) as v:
msg = ' '.join([str(arg) for arg in v.args])
if warning:
self.warn(warning, msg)
else:
raise DistutilsError('%s %s' % (url, msg))
except urllib2.HTTPError:
v = sys.exc_info()[1]
except urllib2.HTTPError as v:
return v
except urllib2.URLError:
v = sys.exc_info()[1]
except urllib2.URLError as v:
if warning:
self.warn(warning, v.reason)
else:
raise DistutilsError("Download error for %s: %s"
% (url, v.reason))
except httplib.BadStatusLine:
v = sys.exc_info()[1]
except httplib.BadStatusLine as v:
if warning:
self.warn(warning, v.line)
else:
@ -728,8 +722,7 @@ class PackageIndex(Environment):
'down, %s' %
(url, v.line)
)
except httplib.HTTPException:
v = sys.exc_info()[1]
except httplib.HTTPException as v:
if warning:
self.warn(warning, v)
else:
@ -1040,9 +1033,8 @@ def local_open(url):
files = []
for f in os.listdir(filename):
if f=='index.html':
fp = open(os.path.join(filename,f),'r')
body = fp.read()
fp.close()
with open(os.path.join(filename,f),'r') as fp:
body = fp.read()
break
elif os.path.isdir(os.path.join(filename,f)):
f+='/'

View File

@ -1,3 +1,6 @@
import sys
import unittest
__all__ = ['get_config_vars', 'get_path']
try:
@ -9,3 +12,41 @@ except ImportError:
if name not in ('platlib', 'purelib'):
raise ValueError("Name must be purelib or platlib")
return get_python_lib(name=='platlib')
try:
# Python >=3.2
from tempfile import TemporaryDirectory
except ImportError:
import shutil
import tempfile
class TemporaryDirectory(object):
""""
Very simple temporary directory context manager.
Will try to delete afterward, but will also ignore OS and similar
errors on deletion.
"""
def __init__(self):
self.name = None # Handle mkdtemp raising an exception
self.name = tempfile.mkdtemp()
def __enter__(self):
return self.name
def __exit__(self, exctype, excvalue, exctrace):
try:
shutil.rmtree(self.name, True)
except OSError:  # removal errors are not the only possible failures here
pass
self.name = None
unittest_main = unittest.main
_PY31 = (3, 1) <= sys.version_info[:2] < (3, 2)
if _PY31:
# on Python 3.1, translate testRunner==None to TextTestRunner
# for compatibility with Python 2.6, 2.7, and 3.2+
def unittest_main(*args, **kwargs):
if 'testRunner' in kwargs and kwargs['testRunner'] is None:
kwargs['testRunner'] = unittest.TextTestRunner
return unittest.main(*args, **kwargs)

View File

@ -5,6 +5,8 @@ import operator
import functools
import itertools
import re
import contextlib
import pickle
import pkg_resources
@ -20,58 +22,221 @@ _open = open
from distutils.errors import DistutilsError
from pkg_resources import working_set
from setuptools.compat import builtins, execfile
from setuptools import compat
from setuptools.compat import builtins
__all__ = [
"AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup",
]
def _execfile(filename, globals, locals=None):
"""
Python 3 implementation of execfile.
"""
mode = 'rb'
# Python 2.6 compile requires LF for newlines, so use deprecated
# Universal newlines support.
if sys.version_info < (2, 7):
mode += 'U'
with open(filename, mode) as stream:
script = stream.read()
if locals is None:
locals = globals
code = compile(script, filename, 'exec')
exec(code, globals, locals)
@contextlib.contextmanager
def save_argv():
saved = sys.argv[:]
try:
yield saved
finally:
sys.argv[:] = saved
@contextlib.contextmanager
def save_path():
saved = sys.path[:]
try:
yield saved
finally:
sys.path[:] = saved
@contextlib.contextmanager
def override_temp(replacement):
"""
Monkey-patch tempfile.tempdir with replacement, ensuring it exists
"""
if not os.path.isdir(replacement):
os.makedirs(replacement)
saved = tempfile.tempdir
tempfile.tempdir = replacement
try:
yield
finally:
tempfile.tempdir = saved
@contextlib.contextmanager
def pushd(target):
saved = os.getcwd()
os.chdir(target)
try:
yield saved
finally:
os.chdir(saved)
class UnpickleableException(Exception):
"""
An exception representing another Exception that could not be pickled.
"""
@classmethod
def dump(cls, type, exc):
"""
Always return a dumped (pickled) type and exc. If exc can't be pickled,
wrap it in UnpickleableException first.
"""
try:
return pickle.dumps(type), pickle.dumps(exc)
except Exception:
return cls.dump(cls, cls(repr(exc)))
class ExceptionSaver:
"""
A Context Manager that will save an exception, serialized, and restore it
later.
"""
def __enter__(self):
return self
def __exit__(self, type, exc, tb):
if not exc:
return
# dump the exception
self._saved = UnpickleableException.dump(type, exc)
self._tb = tb
# suppress the exception
return True
def resume(self):
"restore and re-raise any exception"
if '_saved' not in vars(self):
return
type, exc = map(pickle.loads, self._saved)
compat.reraise(type, exc, self._tb)
@contextlib.contextmanager
def save_modules():
"""
Context in which imported modules are saved.
Translates exceptions internal to the context into the equivalent exception
outside the context.
"""
saved = sys.modules.copy()
with ExceptionSaver() as saved_exc:
yield saved
sys.modules.update(saved)
# remove any modules imported since
del_modules = (
mod_name for mod_name in sys.modules
if mod_name not in saved
# exclude any encodings modules. See #285
and not mod_name.startswith('encodings.')
)
_clear_modules(del_modules)
saved_exc.resume()
def _clear_modules(module_names):
for mod_name in list(module_names):
del sys.modules[mod_name]
@contextlib.contextmanager
def save_pkg_resources_state():
saved = pkg_resources.__getstate__()
try:
yield saved
finally:
pkg_resources.__setstate__(saved)
@contextlib.contextmanager
def setup_context(setup_dir):
temp_dir = os.path.join(setup_dir, 'temp')
with save_pkg_resources_state():
with save_modules():
hide_setuptools()
with save_path():
with save_argv():
with override_temp(temp_dir):
with pushd(setup_dir):
# ensure setuptools commands are available
__import__('setuptools')
yield
def _needs_hiding(mod_name):
"""
>>> _needs_hiding('setuptools')
True
>>> _needs_hiding('pkg_resources')
True
>>> _needs_hiding('setuptools_plugin')
False
>>> _needs_hiding('setuptools.__init__')
True
>>> _needs_hiding('distutils')
True
"""
pattern = re.compile('(setuptools|pkg_resources|distutils)(\.|$)')
return bool(pattern.match(mod_name))
def hide_setuptools():
"""
Remove references to setuptools' modules from sys.modules to allow the
invocation to import the most appropriate setuptools. This technique is
necessary to avoid issues such as #315 where setuptools upgrading itself
would fail to find a function declared in the metadata.
"""
modules = filter(_needs_hiding, sys.modules)
_clear_modules(modules)
def run_setup(setup_script, args):
"""Run a distutils setup script, sandboxed in its directory"""
old_dir = os.getcwd()
save_argv = sys.argv[:]
save_path = sys.path[:]
setup_dir = os.path.abspath(os.path.dirname(setup_script))
temp_dir = os.path.join(setup_dir,'temp')
if not os.path.isdir(temp_dir): os.makedirs(temp_dir)
save_tmp = tempfile.tempdir
save_modules = sys.modules.copy()
pr_state = pkg_resources.__getstate__()
try:
tempfile.tempdir = temp_dir
os.chdir(setup_dir)
with setup_context(setup_dir):
try:
sys.argv[:] = [setup_script]+list(args)
sys.path.insert(0, setup_dir)
# reset to include setup dir, w/clean callback list
working_set.__init__()
working_set.callbacks.append(lambda dist:dist.activate())
DirectorySandbox(setup_dir).run(
lambda: execfile(
"setup.py",
{'__file__':setup_script, '__name__':'__main__'}
)
)
except SystemExit:
v = sys.exc_info()[1]
def runner():
ns = dict(__file__=setup_script, __name__='__main__')
_execfile(setup_script, ns)
DirectorySandbox(setup_dir).run(runner)
except SystemExit as v:
if v.args and v.args[0]:
raise
# Normal exit, just return
finally:
pkg_resources.__setstate__(pr_state)
sys.modules.update(save_modules)
# remove any modules imported within the sandbox
del_modules = [
mod_name for mod_name in sys.modules
if mod_name not in save_modules
# exclude any encodings modules. See #285
and not mod_name.startswith('encodings.')
]
list(map(sys.modules.__delitem__, del_modules))
os.chdir(old_dir)
sys.path[:] = save_path
sys.argv[:] = save_argv
tempfile.tempdir = save_tmp
class AbstractSandbox:
@ -268,7 +433,7 @@ class DirectorySandbox(AbstractSandbox):
self._violation(operation, src, dst, *args, **kw)
return (src,dst)
def open(self, file, flags, mode=0x1FF, *args, **kw): # 0777
def open(self, file, flags, mode=0o777, *args, **kw):
"""Called for low-level os.open()"""
if flags & WRITE_FLAGS and not self._ok(file):
self._violation("os.open", file, flags, mode, *args, **kw)

View File

@ -0,0 +1,5 @@
# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r
__requires__ = %(spec)r
__import__('pkg_resources').require(%(spec)r)
__file__ = %(dev_path)r
exec(compile(open(__file__).read(), __file__, 'exec'))

View File

@ -0,0 +1,3 @@
# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r
__requires__ = %(spec)r
__import__('pkg_resources').run_script(%(spec)r, %(script_name)r)

View File

@ -1,5 +1,6 @@
def __boot():
import sys, os, os.path
import sys
import os
PYTHONPATH = os.environ.get('PYTHONPATH')
if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH):
PYTHONPATH = []
@ -49,13 +50,13 @@ def __boot():
addsitedir(item)
sys.__egginsert += oldpos # restore effective old position
d,nd = makepath(stdpath[0])
d, nd = makepath(stdpath[0])
insert_at = None
new_path = []
for item in sys.path:
p,np = makepath(item)
p, np = makepath(item)
if np==nd and insert_at is None:
# We've hit the first 'system' path entry, so added entries go here
@ -67,17 +68,9 @@ def __boot():
# new path after the insert point, back-insert it
new_path.insert(insert_at, item)
insert_at += 1
sys.path[:] = new_path
if __name__=='site':
__boot()
del __boot

View File

@ -178,12 +178,19 @@ class VerifyingHTTPSConn(HTTPSConnection):
if hasattr(self, '_tunnel') and getattr(self, '_tunnel_host', None):
self.sock = sock
self._tunnel()
# http://bugs.python.org/issue7776: Python>=3.4.1 and >=2.7.7
# change self.host to mean the proxy server host when tunneling is
# being used. Adapt, since we are interested in the destination
# host for the match_hostname() comparison.
actual_host = self._tunnel_host
else:
actual_host = self.host
self.sock = ssl.wrap_socket(
sock, cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.ca_bundle
)
try:
match_hostname(self.sock.getpeercert(), self.host)
match_hostname(self.sock.getpeercert(), actual_host)
except CertificateError:
self.sock.shutdown(socket.SHUT_RDWR)
self.sock.close()

View File

@ -1,8 +1,6 @@
"""Tests for the 'setuptools' package"""
import sys
import os
import unittest
from setuptools.tests import doctest
import distutils.core
import distutils.cmd
from distutils.errors import DistutilsOptionError, DistutilsPlatformError
@ -11,24 +9,13 @@ from distutils.core import Extension
from distutils.version import LooseVersion
from setuptools.compat import func_code
import pytest
import setuptools.dist
import setuptools.depends as dep
from setuptools import Feature
from setuptools.depends import Require
def additional_tests():
import doctest, unittest
suite = unittest.TestSuite((
doctest.DocFileSuite(
os.path.join('tests', 'api_tests.txt'),
optionflags=doctest.ELLIPSIS, package='pkg_resources',
),
))
if sys.platform == 'win32':
suite.addTest(doctest.DocFileSuite('win_script_wrapper.txt'))
return suite
def makeSetup(**args):
"""Return distribution from 'setup(**args)', without executing commands"""
@ -43,7 +30,12 @@ def makeSetup(**args):
distutils.core._setup_stop_after = None
class DependsTests(unittest.TestCase):
needs_bytecode = pytest.mark.skipif(
not hasattr(dep, 'get_module_constant'),
reason="bytecode support not available",
)
class TestDepends:
def testExtractConst(self):
if not hasattr(dep, 'extract_constant'):
@ -56,86 +48,77 @@ class DependsTests(unittest.TestCase):
y = z
fc = func_code(f1)
# unrecognized name
self.assertEqual(dep.extract_constant(fc,'q', -1), None)
assert dep.extract_constant(fc,'q', -1) is None
# constant assigned
self.assertEqual(dep.extract_constant(fc,'x', -1), "test")
assert dep.extract_constant(fc,'x', -1) == "test"
# expression assigned
self.assertEqual(dep.extract_constant(fc,'y', -1), -1)
assert dep.extract_constant(fc,'y', -1) == -1
# recognized name, not assigned
self.assertEqual(dep.extract_constant(fc,'z', -1), None)
assert dep.extract_constant(fc,'z', -1) is None
def testFindModule(self):
self.assertRaises(ImportError, dep.find_module, 'no-such.-thing')
self.assertRaises(ImportError, dep.find_module, 'setuptools.non-existent')
with pytest.raises(ImportError):
dep.find_module('no-such.-thing')
with pytest.raises(ImportError):
dep.find_module('setuptools.non-existent')
f,p,i = dep.find_module('setuptools.tests')
f.close()
@needs_bytecode
def testModuleExtract(self):
if not hasattr(dep, 'get_module_constant'):
# skip on non-bytecode platforms
return
from email import __version__
self.assertEqual(
dep.get_module_constant('email','__version__'), __version__
)
self.assertEqual(
dep.get_module_constant('sys','version'), sys.version
)
self.assertEqual(
dep.get_module_constant('setuptools.tests','__doc__'),__doc__
)
assert dep.get_module_constant('email','__version__') == __version__
assert dep.get_module_constant('sys','version') == sys.version
assert dep.get_module_constant('setuptools.tests','__doc__') == __doc__
@needs_bytecode
def testRequire(self):
if not hasattr(dep, 'extract_constant'):
# skip on non-bytecode platforms
return
req = Require('Email','1.0.3','email')
self.assertEqual(req.name, 'Email')
self.assertEqual(req.module, 'email')
self.assertEqual(req.requested_version, '1.0.3')
self.assertEqual(req.attribute, '__version__')
self.assertEqual(req.full_name(), 'Email-1.0.3')
assert req.name == 'Email'
assert req.module == 'email'
assert req.requested_version == '1.0.3'
assert req.attribute == '__version__'
assert req.full_name() == 'Email-1.0.3'
from email import __version__
self.assertEqual(req.get_version(), __version__)
self.assertTrue(req.version_ok('1.0.9'))
self.assertTrue(not req.version_ok('0.9.1'))
self.assertTrue(not req.version_ok('unknown'))
assert req.get_version() == __version__
assert req.version_ok('1.0.9')
assert not req.version_ok('0.9.1')
assert not req.version_ok('unknown')
self.assertTrue(req.is_present())
self.assertTrue(req.is_current())
assert req.is_present()
assert req.is_current()
req = Require('Email 3000','03000','email',format=LooseVersion)
self.assertTrue(req.is_present())
self.assertTrue(not req.is_current())
self.assertTrue(not req.version_ok('unknown'))
assert req.is_present()
assert not req.is_current()
assert not req.version_ok('unknown')
req = Require('Do-what-I-mean','1.0','d-w-i-m')
self.assertTrue(not req.is_present())
self.assertTrue(not req.is_current())
assert not req.is_present()
assert not req.is_current()
req = Require('Tests', None, 'tests', homepage="http://example.com")
self.assertEqual(req.format, None)
self.assertEqual(req.attribute, None)
self.assertEqual(req.requested_version, None)
self.assertEqual(req.full_name(), 'Tests')
self.assertEqual(req.homepage, 'http://example.com')
assert req.format is None
assert req.attribute is None
assert req.requested_version is None
assert req.full_name() == 'Tests'
assert req.homepage == 'http://example.com'
paths = [os.path.dirname(p) for p in __path__]
self.assertTrue(req.is_present(paths))
self.assertTrue(req.is_current(paths))
assert req.is_present(paths)
assert req.is_current(paths)
class DistroTests(unittest.TestCase):
class TestDistro:
def setUp(self):
def setup_method(self, method):
self.e1 = Extension('bar.ext',['bar.c'])
self.e2 = Extension('c.y', ['y.c'])
@ -147,21 +130,21 @@ class DistroTests(unittest.TestCase):
)
def testDistroType(self):
self.assertTrue(isinstance(self.dist,setuptools.dist.Distribution))
assert isinstance(self.dist,setuptools.dist.Distribution)
def testExcludePackage(self):
self.dist.exclude_package('a')
self.assertEqual(self.dist.packages, ['b','c'])
assert self.dist.packages == ['b','c']
self.dist.exclude_package('b')
self.assertEqual(self.dist.packages, ['c'])
self.assertEqual(self.dist.py_modules, ['x'])
self.assertEqual(self.dist.ext_modules, [self.e1, self.e2])
assert self.dist.packages == ['c']
assert self.dist.py_modules == ['x']
assert self.dist.ext_modules == [self.e1, self.e2]
self.dist.exclude_package('c')
self.assertEqual(self.dist.packages, [])
self.assertEqual(self.dist.py_modules, ['x'])
self.assertEqual(self.dist.ext_modules, [self.e1])
assert self.dist.packages == []
assert self.dist.py_modules == ['x']
assert self.dist.ext_modules == [self.e1]
# test removals from unspecified options
makeSetup().exclude_package('x')
@ -169,21 +152,21 @@ class DistroTests(unittest.TestCase):
def testIncludeExclude(self):
# remove an extension
self.dist.exclude(ext_modules=[self.e1])
self.assertEqual(self.dist.ext_modules, [self.e2])
assert self.dist.ext_modules == [self.e2]
# add it back in
self.dist.include(ext_modules=[self.e1])
self.assertEqual(self.dist.ext_modules, [self.e2, self.e1])
assert self.dist.ext_modules == [self.e2, self.e1]
# should not add duplicate
self.dist.include(ext_modules=[self.e1])
self.assertEqual(self.dist.ext_modules, [self.e2, self.e1])
assert self.dist.ext_modules == [self.e2, self.e1]
def testExcludePackages(self):
self.dist.exclude(packages=['c','b','a'])
self.assertEqual(self.dist.packages, [])
self.assertEqual(self.dist.py_modules, ['x'])
self.assertEqual(self.dist.ext_modules, [self.e1])
assert self.dist.packages == []
assert self.dist.py_modules == ['x']
assert self.dist.ext_modules == [self.e1]
def testEmpty(self):
dist = makeSetup()
@ -192,49 +175,41 @@ class DistroTests(unittest.TestCase):
dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
def testContents(self):
self.assertTrue(self.dist.has_contents_for('a'))
assert self.dist.has_contents_for('a')
self.dist.exclude_package('a')
self.assertTrue(not self.dist.has_contents_for('a'))
assert not self.dist.has_contents_for('a')
self.assertTrue(self.dist.has_contents_for('b'))
assert self.dist.has_contents_for('b')
self.dist.exclude_package('b')
self.assertTrue(not self.dist.has_contents_for('b'))
assert not self.dist.has_contents_for('b')
self.assertTrue(self.dist.has_contents_for('c'))
assert self.dist.has_contents_for('c')
self.dist.exclude_package('c')
self.assertTrue(not self.dist.has_contents_for('c'))
assert not self.dist.has_contents_for('c')
def testInvalidIncludeExclude(self):
self.assertRaises(DistutilsSetupError,
self.dist.include, nonexistent_option='x'
)
self.assertRaises(DistutilsSetupError,
self.dist.exclude, nonexistent_option='x'
)
self.assertRaises(DistutilsSetupError,
self.dist.include, packages={'x':'y'}
)
self.assertRaises(DistutilsSetupError,
self.dist.exclude, packages={'x':'y'}
)
self.assertRaises(DistutilsSetupError,
self.dist.include, ext_modules={'x':'y'}
)
self.assertRaises(DistutilsSetupError,
self.dist.exclude, ext_modules={'x':'y'}
)
with pytest.raises(DistutilsSetupError):
self.dist.include(nonexistent_option='x')
with pytest.raises(DistutilsSetupError):
self.dist.exclude(nonexistent_option='x')
with pytest.raises(DistutilsSetupError):
self.dist.include(packages={'x':'y'})
with pytest.raises(DistutilsSetupError):
self.dist.exclude(packages={'x':'y'})
with pytest.raises(DistutilsSetupError):
self.dist.include(ext_modules={'x':'y'})
with pytest.raises(DistutilsSetupError):
self.dist.exclude(ext_modules={'x':'y'})
self.assertRaises(DistutilsSetupError,
self.dist.include, package_dir=['q']
)
self.assertRaises(DistutilsSetupError,
self.dist.exclude, package_dir=['q']
)
with pytest.raises(DistutilsSetupError):
self.dist.include(package_dir=['q'])
with pytest.raises(DistutilsSetupError):
self.dist.exclude(package_dir=['q'])
class FeatureTests(unittest.TestCase):
class TestFeatures:
def setUp(self):
def setup_method(self, method):
self.req = Require('Distutils','1.0.3','distutils')
self.dist = makeSetup(
features={
@ -256,80 +231,75 @@ class FeatureTests(unittest.TestCase):
)
def testDefaults(self):
self.assertTrue(not
Feature(
"test",standard=True,remove='x',available=False
).include_by_default()
)
self.assertTrue(
Feature("test",standard=True,remove='x').include_by_default()
)
assert not Feature(
"test",standard=True,remove='x',available=False
).include_by_default()
assert Feature("test",standard=True,remove='x').include_by_default()
# Feature must have either kwargs, removes, or require_features
self.assertRaises(DistutilsSetupError, Feature, "test")
with pytest.raises(DistutilsSetupError):
Feature("test")
def testAvailability(self):
self.assertRaises(
DistutilsPlatformError,
self.dist.features['dwim'].include_in, self.dist
)
with pytest.raises(DistutilsPlatformError):
self.dist.features['dwim'].include_in(self.dist)
def testFeatureOptions(self):
dist = self.dist
self.assertTrue(
assert (
('with-dwim',None,'include DWIM') in dist.feature_options
)
self.assertTrue(
assert (
('without-dwim',None,'exclude DWIM (default)') in dist.feature_options
)
self.assertTrue(
assert (
('with-bar',None,'include bar (default)') in dist.feature_options
)
self.assertTrue(
assert (
('without-bar',None,'exclude bar') in dist.feature_options
)
self.assertEqual(dist.feature_negopt['without-foo'],'with-foo')
self.assertEqual(dist.feature_negopt['without-bar'],'with-bar')
self.assertEqual(dist.feature_negopt['without-dwim'],'with-dwim')
self.assertTrue(not 'without-baz' in dist.feature_negopt)
assert dist.feature_negopt['without-foo'] == 'with-foo'
assert dist.feature_negopt['without-bar'] == 'with-bar'
assert dist.feature_negopt['without-dwim'] == 'with-dwim'
assert (not 'without-baz' in dist.feature_negopt)
def testUseFeatures(self):
dist = self.dist
self.assertEqual(dist.with_foo,1)
self.assertEqual(dist.with_bar,0)
self.assertEqual(dist.with_baz,1)
self.assertTrue(not 'bar_et' in dist.py_modules)
self.assertTrue(not 'pkg.bar' in dist.packages)
self.assertTrue('pkg.baz' in dist.packages)
self.assertTrue('scripts/baz_it' in dist.scripts)
self.assertTrue(('libfoo','foo/foofoo.c') in dist.libraries)
self.assertEqual(dist.ext_modules,[])
self.assertEqual(dist.require_features, [self.req])
assert dist.with_foo == 1
assert dist.with_bar == 0
assert dist.with_baz == 1
assert (not 'bar_et' in dist.py_modules)
assert (not 'pkg.bar' in dist.packages)
assert ('pkg.baz' in dist.packages)
assert ('scripts/baz_it' in dist.scripts)
assert (('libfoo','foo/foofoo.c') in dist.libraries)
assert dist.ext_modules == []
assert dist.require_features == [self.req]
# If we ask for bar, it should fail because we explicitly disabled
# it on the command line
self.assertRaises(DistutilsOptionError, dist.include_feature, 'bar')
with pytest.raises(DistutilsOptionError):
dist.include_feature('bar')
def testFeatureWithInvalidRemove(self):
self.assertRaises(
SystemExit, makeSetup, features = {'x':Feature('x', remove='y')}
)
with pytest.raises(SystemExit):
makeSetup(features={'x':Feature('x', remove='y')})
class TestCommandTests(unittest.TestCase):
class TestCommandTests:
def testTestIsCommand(self):
test_cmd = makeSetup().get_command_obj('test')
self.assertTrue(isinstance(test_cmd, distutils.cmd.Command))
assert (isinstance(test_cmd, distutils.cmd.Command))
def testLongOptSuiteWNoDefault(self):
ts1 = makeSetup(script_args=['test','--test-suite=foo.tests.suite'])
ts1 = ts1.get_command_obj('test')
ts1.ensure_finalized()
self.assertEqual(ts1.test_suite, 'foo.tests.suite')
assert ts1.test_suite == 'foo.tests.suite'
def testDefaultSuite(self):
ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test')
ts2.ensure_finalized()
self.assertEqual(ts2.test_suite, 'bar.tests.suite')
assert ts2.test_suite == 'bar.tests.suite'
def testDefaultWModuleOnCmdLine(self):
ts3 = makeSetup(
@ -337,16 +307,17 @@ class TestCommandTests(unittest.TestCase):
script_args=['test','-m','foo.tests']
).get_command_obj('test')
ts3.ensure_finalized()
self.assertEqual(ts3.test_module, 'foo.tests')
self.assertEqual(ts3.test_suite, 'foo.tests.test_suite')
assert ts3.test_module == 'foo.tests'
assert ts3.test_suite == 'foo.tests.test_suite'
def testConflictingOptions(self):
ts4 = makeSetup(
script_args=['test','-m','bar.tests', '-s','foo.tests.suite']
).get_command_obj('test')
self.assertRaises(DistutilsOptionError, ts4.ensure_finalized)
with pytest.raises(DistutilsOptionError):
ts4.ensure_finalized()
def testNoSuite(self):
ts5 = makeSetup().get_command_obj('test')
ts5.ensure_finalized()
self.assertEqual(ts5.test_suite, None)
assert ts5.test_suite is None

View File

@ -0,0 +1,93 @@
import tempfile
import os
import shutil
import sys
import contextlib
import site
from ..compat import StringIO
@contextlib.contextmanager
def tempdir(cd=lambda dir:None, **kwargs):
temp_dir = tempfile.mkdtemp(**kwargs)
orig_dir = os.getcwd()
try:
cd(temp_dir)
yield temp_dir
finally:
cd(orig_dir)
shutil.rmtree(temp_dir)
@contextlib.contextmanager
def environment(**replacements):
"""
In a context, patch the environment with replacements. Pass None values
to clear the values.
"""
saved = dict(
(key, os.environ[key])
for key in replacements
if key in os.environ
)
# remove values that are null
remove = (key for (key, value) in replacements.items() if value is None)
for key in list(remove):
os.environ.pop(key, None)
replacements.pop(key)
os.environ.update(replacements)
try:
yield saved
finally:
for key in replacements:
os.environ.pop(key, None)
os.environ.update(saved)
@contextlib.contextmanager
def argv(repl):
old_argv = sys.argv[:]
sys.argv[:] = repl
yield
sys.argv[:] = old_argv
@contextlib.contextmanager
def quiet():
"""
Redirect stdout/stderr to StringIO objects to prevent console output from
distutils commands.
"""
old_stdout = sys.stdout
old_stderr = sys.stderr
new_stdout = sys.stdout = StringIO()
new_stderr = sys.stderr = StringIO()
try:
yield new_stdout, new_stderr
finally:
new_stdout.seek(0)
new_stderr.seek(0)
sys.stdout = old_stdout
sys.stderr = old_stderr
@contextlib.contextmanager
def save_user_site_setting():
saved = site.ENABLE_USER_SITE
try:
yield saved
finally:
site.ENABLE_USER_SITE = saved
@contextlib.contextmanager
def suppress_exceptions(*excs):
try:
yield
except excs:
pass
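A short usage sketch for these helpers; the environment variable is arbitrary and the setuptools.tests.contexts module path is assumed from this file's location.

from setuptools.tests import contexts

with contexts.environment(DISTUTILS_DEBUG=None):      # temporarily unset the variable
    with contexts.quiet() as (stdout, stderr):
        print('swallowed while the block runs')
    print(stdout.getvalue())                           # captured text is still available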

View File

@ -1,119 +1,10 @@
import os
import zipfile
import sys
import tempfile
import unittest
import shutil
import stat
import unicodedata
from subprocess import Popen as _Popen, PIPE as _PIPE
def _extract(self, member, path=None, pwd=None):
"""for zipfile py2.5 borrowed from cpython"""
if not isinstance(member, zipfile.ZipInfo):
member = self.getinfo(member)
if path is None:
path = os.getcwd()
return _extract_member(self, member, path, pwd)
def _extract_from_zip(self, name, dest_path):
dest_file = open(dest_path, 'wb')
try:
dest_file.write(self.read(name))
finally:
dest_file.close()
def _extract_member(self, member, targetpath, pwd):
"""for zipfile py2.5 borrowed from cpython"""
# build the destination pathname, replacing
# forward slashes to platform specific separators.
# Strip trailing path separator, unless it represents the root.
if (targetpath[-1:] in (os.path.sep, os.path.altsep)
and len(os.path.splitdrive(targetpath)[1]) > 1):
targetpath = targetpath[:-1]
# don't include leading "/" from file name if present
if member.filename[0] == '/':
targetpath = os.path.join(targetpath, member.filename[1:])
else:
targetpath = os.path.join(targetpath, member.filename)
targetpath = os.path.normpath(targetpath)
# Create all upper directories if necessary.
upperdirs = os.path.dirname(targetpath)
if upperdirs and not os.path.exists(upperdirs):
os.makedirs(upperdirs)
if member.filename[-1] == '/':
if not os.path.isdir(targetpath):
os.mkdir(targetpath)
return targetpath
_extract_from_zip(self, member.filename, targetpath)
return targetpath
def _remove_dir(target):
# on windows this seems to be a problem
for dir_path, dirs, files in os.walk(target):
os.chmod(dir_path, stat.S_IWRITE)
for filename in files:
os.chmod(os.path.join(dir_path, filename), stat.S_IWRITE)
shutil.rmtree(target)
class ZippedEnvironment(unittest.TestCase):
datafile = None
dataname = None
old_cwd = None
def setUp(self):
if self.datafile is None or self.dataname is None:
return
if not os.path.isfile(self.datafile):
self.old_cwd = None
return
self.old_cwd = os.getcwd()
self.temp_dir = tempfile.mkdtemp()
zip_file, source, target = [None, None, None]
try:
zip_file = zipfile.ZipFile(self.datafile)
for files in zip_file.namelist():
_extract(zip_file, files, self.temp_dir)
finally:
if zip_file:
zip_file.close()
del zip_file
os.chdir(os.path.join(self.temp_dir, self.dataname))
def tearDown(self):
#Assume setUp was never completed
if self.dataname is None or self.datafile is None:
return
try:
if self.old_cwd:
os.chdir(self.old_cwd)
_remove_dir(self.temp_dir)
except OSError:
#sigh?
pass
def _which_dirs(cmd):
result = set()
for path in os.environ.get('PATH', '').split(os.pathsep):
@ -147,10 +38,13 @@ def run_setup_py(cmd, pypath=None, path=None,
cmd = [sys.executable, "setup.py"] + list(cmd)
#regarding the shell argument, see: http://bugs.python.org/issue8557
# http://bugs.python.org/issue8557
shell = sys.platform == 'win32'
try:
proc = _Popen(cmd, stdout=_PIPE, stderr=_PIPE,
shell=(sys.platform == 'win32'), env=env)
proc = _Popen(
cmd, stdout=_PIPE, stderr=_PIPE, shell=shell, env=env,
)
data = proc.communicate()[data_stream]
except OSError:
@ -158,7 +52,8 @@ def run_setup_py(cmd, pypath=None, path=None,
#decode the console string if needed
if hasattr(data, "decode"):
data = data.decode() # should use the preferred encoding
# use the default encoding
data = data.decode()
data = unicodedata.normalize('NFC', data)
# communicate calls wait()

View File

@ -0,0 +1,27 @@
try:
from unittest import mock
except ImportError:
import mock
import pytest
from . import contexts
@pytest.yield_fixture
def user_override():
"""
Override site.USER_BASE and site.USER_SITE with temporary directories in
a context.
"""
with contexts.tempdir() as user_base:
with mock.patch('site.USER_BASE', user_base):
with contexts.tempdir() as user_site:
with mock.patch('site.USER_SITE', user_site):
with contexts.save_user_site_setting():
yield
@pytest.yield_fixture
def tmpdir_cwd(tmpdir):
with tmpdir.as_cwd() as orig:
yield orig

View File

@ -0,0 +1,3 @@
<html><body>
<a href="/foobar-0.1.tar.gz#md5=1__bad_md5___">bad old link</a>
</body></html>

View File

@ -0,0 +1,4 @@
<html><body>
<a href="/foobar-0.1.tar.gz#md5=0_correct_md5">foobar-0.1.tar.gz</a><br/>
<a href="../../external.html" rel="homepage">external homepage</a><br/>
</body></html>

View File

@ -1,14 +1,14 @@
import unittest
import sys
import tarfile
import contextlib
try:
# provide skipIf for Python 2.4-2.6
skipIf = unittest.skipIf
except AttributeError:
def skipIf(condition, reason):
def skipper(func):
def skip(*args, **kwargs):
return
if condition:
return skip
return func
return skipper
def _tarfile_open_ex(*args, **kwargs):
"""
Extend result as a context manager.
"""
return contextlib.closing(tarfile.open(*args, **kwargs))
if sys.version_info[:2] < (2, 7) or (3, 0) <= sys.version_info[:2] < (3, 2):
tarfile_open = _tarfile_open_ex
else:
tarfile_open = tarfile.open

View File

@ -1,11 +1,10 @@
"""Basic http server for tests to simulate PyPI or custom indexes
"""
import sys
import time
import threading
from setuptools.compat import BaseHTTPRequestHandler
from setuptools.compat import (urllib2, URLError, HTTPServer,
SimpleHTTPRequestHandler)
from setuptools.compat import HTTPServer, SimpleHTTPRequestHandler
class IndexServer(HTTPServer):
"""Basic single-threaded http server simulating a package index
@ -23,12 +22,8 @@ class IndexServer(HTTPServer):
HTTPServer.__init__(self, server_address, RequestHandlerClass)
self._run = True
def serve(self):
while self._run:
self.handle_request()
def start(self):
self.thread = threading.Thread(target=self.serve)
self.thread = threading.Thread(target=self.serve_forever)
self.thread.start()
def stop(self):
@ -37,19 +32,7 @@ class IndexServer(HTTPServer):
# Let the server finish the last request and wait for a new one.
time.sleep(0.1)
# self.shutdown is not supported on python < 2.6, so just
# set _run to false, and make a request, causing it to
# terminate.
self._run = False
url = 'http://127.0.0.1:%(server_port)s/' % vars(self)
try:
if sys.version_info >= (2, 6):
urllib2.urlopen(url, timeout=5)
else:
urllib2.urlopen(url)
except URLError:
# ignore any errors; all that's important is the request
pass
self.shutdown()
self.thread.join()
self.socket.close()
@ -77,6 +60,6 @@ class MockServer(HTTPServer, threading.Thread):
def run(self):
self.serve_forever()
@property
def url(self):
return 'http://localhost:%(server_port)s/' % vars(self)
url = property(url)

View File

@ -1,50 +1,32 @@
"""develop tests
"""
import sys
import os, re, shutil, tempfile, unittest
import tempfile
import site
import os
import re
import pytest
from distutils.errors import DistutilsError
from setuptools.compat import StringIO
from setuptools.command.bdist_egg import bdist_egg
from setuptools.command import easy_install as easy_install_pkg
from setuptools.dist import Distribution
from . import contexts
SETUP_PY = """\
from setuptools import setup
setup(name='foo', py_modules=['hi'])
"""
class TestDevelopTest(unittest.TestCase):
def setUp(self):
self.dir = tempfile.mkdtemp()
self.old_cwd = os.getcwd()
os.chdir(self.dir)
f = open('setup.py', 'w')
@pytest.yield_fixture
def setup_context(tmpdir):
with (tmpdir/'setup.py').open('w') as f:
f.write(SETUP_PY)
f.close()
f = open('hi.py', 'w')
with (tmpdir/'hi.py').open('w') as f:
f.write('1\n')
f.close()
if sys.version >= "2.6":
self.old_base = site.USER_BASE
site.USER_BASE = tempfile.mkdtemp()
self.old_site = site.USER_SITE
site.USER_SITE = tempfile.mkdtemp()
with tmpdir.as_cwd():
yield tmpdir
def tearDown(self):
os.chdir(self.old_cwd)
shutil.rmtree(self.dir)
if sys.version >= "2.6":
shutil.rmtree(site.USER_BASE)
shutil.rmtree(site.USER_SITE)
site.USER_BASE = self.old_base
site.USER_SITE = self.old_site
def test_bdist_egg(self):
class Test:
def test_bdist_egg(self, setup_context, user_override):
dist = Distribution(dict(
script_name='setup.py',
script_args=['bdist_egg'],
@ -52,18 +34,10 @@ class TestDevelopTest(unittest.TestCase):
py_modules=['hi']
))
os.makedirs(os.path.join('build', 'src'))
old_stdout = sys.stdout
sys.stdout = o = StringIO()
try:
with contexts.quiet():
dist.parse_command_line()
dist.run_commands()
finally:
sys.stdout = old_stdout
# let's see if we got our egg link at the right place
[content] = os.listdir('dist')
self.assertTrue(re.match('foo-0.0.0-py[23].\d.egg$', content))
def test_suite():
return unittest.makeSuite(TestDevelopTest)
assert re.match('foo-0.0.0-py[23].\d.egg$', content)

View File

@ -1,20 +1,18 @@
"""build_ext tests
"""
import os, shutil, tempfile, unittest
from distutils.command.build_ext import build_ext as distutils_build_ext
import distutils.command.build_ext as orig
from setuptools.command.build_ext import build_ext
from setuptools.dist import Distribution
class TestBuildExtTest(unittest.TestCase):
class TestBuildExt:
def test_get_ext_filename(self):
# setuptools needs to give back the same
# result than distutils, even if the fullname
# is not in ext_map
"""
Setuptools needs to give back the same
result as distutils, even if the fullname
is not in ext_map.
"""
dist = Distribution()
cmd = build_ext(dist)
cmd.ext_map['foo/bar'] = ''
res = cmd.get_ext_filename('foo')
wanted = distutils_build_ext.get_ext_filename(cmd, 'foo')
wanted = orig.build_ext.get_ext_filename(cmd, 'foo')
assert res == wanted

View File

@ -1,14 +1,12 @@
"""develop tests
"""
import sys
import os, shutil, tempfile, unittest
import tempfile
import os
import shutil
import site
import sys
import tempfile
from distutils.errors import DistutilsError
from setuptools.command.develop import develop
from setuptools.command import easy_install as easy_install_pkg
from setuptools.compat import StringIO
from setuptools.dist import Distribution
SETUP_PY = """\
@ -23,10 +21,10 @@ setup(name='foo',
INIT_PY = """print "foo"
"""
class TestDevelopTest(unittest.TestCase):
class TestDevelopTest:
def setUp(self):
if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
def setup_method(self, method):
if hasattr(sys, 'real_prefix'):
return
# Directory structure
@ -50,8 +48,8 @@ class TestDevelopTest(unittest.TestCase):
self.old_site = site.USER_SITE
site.USER_SITE = tempfile.mkdtemp()
def tearDown(self):
if sys.version < "2.6" or hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix):
def teardown_method(self, method):
if hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix):
return
os.chdir(self.old_cwd)
@ -62,7 +60,7 @@ class TestDevelopTest(unittest.TestCase):
site.USER_SITE = self.old_site
def test_develop(self):
if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
if hasattr(sys, 'real_prefix'):
return
dist = Distribution(
dict(name='foo',
@ -86,7 +84,7 @@ class TestDevelopTest(unittest.TestCase):
# let's see if we got our egg link at the right place
content = os.listdir(site.USER_SITE)
content.sort()
self.assertEqual(content, ['easy-install.pth', 'foo.egg-link'])
assert content == ['easy-install.pth', 'foo.egg-link']
# Check that we are using the right code.
egg_link_file = open(os.path.join(site.USER_SITE, 'foo.egg-link'), 'rt')
@ -100,23 +98,6 @@ class TestDevelopTest(unittest.TestCase):
finally:
init_file.close()
if sys.version < "3":
self.assertEqual(init, 'print "foo"')
assert init == 'print "foo"'
else:
self.assertEqual(init, 'print("foo")')
def notest_develop_with_setup_requires(self):
wanted = ("Could not find suitable distribution for "
"Requirement.parse('I-DONT-EXIST')")
old_dir = os.getcwd()
os.chdir(self.dir)
try:
try:
dist = Distribution({'setup_requires': ['I_DONT_EXIST']})
except DistutilsError:
e = sys.exc_info()[1]
error = str(e)
if error == wanted:
pass
finally:
os.chdir(old_dir)
assert init == 'print("foo")'
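The conversion here is mechanical: setUp/tearDown on a unittest.TestCase become setup_method/teardown_method on a plain class, and self.assertEqual becomes a bare assert. A tiny illustrative class (names made up) showing the pytest-style hooks in isolation:

import os
import shutil
import tempfile

class TestWithPytestHooks:
    def setup_method(self, method):
        # runs before every test method; no TestCase base class needed
        self.tmp = tempfile.mkdtemp()

    def teardown_method(self, method):
        # runs after every test method, even when the test fails
        shutil.rmtree(self.tmp)

    def test_tmpdir_exists(self):
        assert os.path.isdir(self.tmp)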

View File

@ -3,28 +3,20 @@
import os
import shutil
import tempfile
import unittest
import textwrap
try:
import ast
except:
pass
import pytest
import pkg_resources
from .textwrap import DALS
from setuptools.tests.py26compat import skipIf
def DALS(s):
"dedent and left-strip"
return textwrap.dedent(s).lstrip()
class TestDistInfo(unittest.TestCase):
class TestDistInfo:
def test_distinfo(self):
dists = {}
for d in pkg_resources.find_distributions(self.tmpdir):
dists[d.project_name] = d
dists = dict(
(d.project_name, d)
for d in pkg_resources.find_distributions(self.tmpdir)
)
assert len(dists) == 2, dists
@ -34,50 +26,45 @@ class TestDistInfo(unittest.TestCase):
assert versioned.version == '2.718' # from filename
assert unversioned.version == '0.3' # from METADATA
@skipIf('ast' not in globals(),
"ast is used to test conditional dependencies (Python >= 2.6)")
@pytest.mark.importorskip('ast')
def test_conditional_dependencies(self):
requires = [pkg_resources.Requirement.parse('splort==4'),
pkg_resources.Requirement.parse('quux>=1.1')]
specs = 'splort==4', 'quux>=1.1'
requires = list(map(pkg_resources.Requirement.parse, specs))
for d in pkg_resources.find_distributions(self.tmpdir):
self.assertEqual(d.requires(), requires[:1])
self.assertEqual(d.requires(extras=('baz',)), requires)
self.assertEqual(d.extras, ['baz'])
assert d.requires() == requires[:1]
assert d.requires(extras=('baz',)) == requires
assert d.extras == ['baz']
def setUp(self):
metadata_template = DALS("""
Metadata-Version: 1.2
Name: {name}
{version}
Requires-Dist: splort (==4)
Provides-Extra: baz
Requires-Dist: quux (>=1.1); extra == 'baz'
""")
def setup_method(self, method):
self.tmpdir = tempfile.mkdtemp()
versioned = os.path.join(self.tmpdir,
'VersionedDistribution-2.718.dist-info')
dist_info_name = 'VersionedDistribution-2.718.dist-info'
versioned = os.path.join(self.tmpdir, dist_info_name)
os.mkdir(versioned)
metadata_file = open(os.path.join(versioned, 'METADATA'), 'w+')
try:
metadata_file.write(DALS(
"""
Metadata-Version: 1.2
Name: VersionedDistribution
Requires-Dist: splort (4)
Provides-Extra: baz
Requires-Dist: quux (>=1.1); extra == 'baz'
"""))
finally:
metadata_file.close()
unversioned = os.path.join(self.tmpdir,
'UnversionedDistribution.dist-info')
with open(os.path.join(versioned, 'METADATA'), 'w+') as metadata_file:
metadata = self.metadata_template.format(
name='VersionedDistribution',
version='',
).replace('\n\n', '\n')
metadata_file.write(metadata)
dist_info_name = 'UnversionedDistribution.dist-info'
unversioned = os.path.join(self.tmpdir, dist_info_name)
os.mkdir(unversioned)
metadata_file = open(os.path.join(unversioned, 'METADATA'), 'w+')
try:
metadata_file.write(DALS(
"""
Metadata-Version: 1.2
Name: UnversionedDistribution
Version: 0.3
Requires-Dist: splort (==4)
Provides-Extra: baz
Requires-Dist: quux (>=1.1); extra == 'baz'
"""))
finally:
metadata_file.close()
with open(os.path.join(unversioned, 'METADATA'), 'w+') as metadata_file:
metadata = self.metadata_template.format(
name='UnversionedDistribution',
version='Version: 0.3',
)
metadata_file.write(metadata)
def tearDown(self):
def teardown_method(self, method):
shutil.rmtree(self.tmpdir)
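The local DALS() helper removed above now comes from setuptools/tests/textwrap.py; as the deleted definition shows, it is simply dedent-and-left-strip, which keeps triple-quoted METADATA templates readable inside indented code:

import textwrap

def DALS(s):
    "dedent and left-strip"
    return textwrap.dedent(s).lstrip()

metadata_template = DALS("""
    Metadata-Version: 1.2
    Name: {name}
    {version}
    Requires-Dist: splort (==4)
    Provides-Extra: baz
    Requires-Dist: quux (>=1.1); extra == 'baz'
    """)
metadata = metadata_template.format(
    name='VersionedDistribution',
    version='',
).replace('\n\n', '\n')   # drop the blank line left by an empty version field
print(metadata)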

View File

@ -1,29 +1,44 @@
# -*- coding: utf-8 -*-
"""Easy install Tests
"""
from __future__ import absolute_import
import sys
import os
import shutil
import tempfile
import unittest
import site
import contextlib
import textwrap
import tarfile
import logging
import distutils.core
import itertools
import distutils.errors
from setuptools.compat import StringIO, BytesIO, next, urlparse
from setuptools.sandbox import run_setup, SandboxViolation
from setuptools.command.easy_install import (
easy_install, fix_jython_executable, get_script_args, nt_quote_arg)
import pytest
try:
from unittest import mock
except ImportError:
import mock
from setuptools import sandbox
from setuptools import compat
from setuptools.compat import StringIO, BytesIO, urlparse
from setuptools.sandbox import run_setup
import setuptools.command.easy_install as ei
from setuptools.command.easy_install import PthDistributions
from setuptools.command import easy_install as easy_install_pkg
from setuptools.dist import Distribution
from pkg_resources import working_set, VersionConflict
from pkg_resources import working_set
from pkg_resources import Distribution as PRDistribution
import setuptools.tests.server
import pkg_resources
from .py26compat import tarfile_open
from . import contexts
from .textwrap import DALS
class FakeDist(object):
def get_entry_map(self, group):
if group != 'console_scripts':
@ -33,134 +48,123 @@ class FakeDist(object):
def as_requirement(self):
return 'spec'
WANTED = """\
#!%s
# EASY-INSTALL-ENTRY-SCRIPT: 'spec','console_scripts','name'
__requires__ = 'spec'
import sys
from pkg_resources import load_entry_point
SETUP_PY = DALS("""
from setuptools import setup
if __name__ == '__main__':
sys.exit(
load_entry_point('spec', 'console_scripts', 'name')()
)
""" % nt_quote_arg(fix_jython_executable(sys.executable, ""))
setup(name='foo')
""")
SETUP_PY = """\
from setuptools import setup
setup(name='foo')
"""
class TestEasyInstallTest(unittest.TestCase):
class TestEasyInstallTest:
def test_install_site_py(self):
dist = Distribution()
cmd = easy_install(dist)
cmd = ei.easy_install(dist)
cmd.sitepy_installed = False
cmd.install_dir = tempfile.mkdtemp()
try:
cmd.install_site_py()
sitepy = os.path.join(cmd.install_dir, 'site.py')
self.assertTrue(os.path.exists(sitepy))
assert os.path.exists(sitepy)
finally:
shutil.rmtree(cmd.install_dir)
def test_get_script_args(self):
header = ei.CommandSpec.best().from_environment().as_header()
expected = header + DALS("""
# EASY-INSTALL-ENTRY-SCRIPT: 'spec','console_scripts','name'
__requires__ = 'spec'
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.exit(
load_entry_point('spec', 'console_scripts', 'name')()
)
""")
dist = FakeDist()
old_platform = sys.platform
try:
name, script = [i for i in next(get_script_args(dist))][0:2]
finally:
sys.platform = old_platform
args = next(ei.ScriptWriter.get_args(dist))
name, script = itertools.islice(args, 2)
self.assertEqual(script, WANTED)
assert script == expected
def test_no_find_links(self):
# new option '--no-find-links', that blocks find-links added at
# the project level
dist = Distribution()
cmd = easy_install(dist)
cmd = ei.easy_install(dist)
cmd.check_pth_processing = lambda: True
cmd.no_find_links = True
cmd.find_links = ['link1', 'link2']
cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
cmd.args = ['ok']
cmd.ensure_finalized()
self.assertEqual(cmd.package_index.scanned_urls, {})
assert cmd.package_index.scanned_urls == {}
# let's try without it (default behavior)
cmd = easy_install(dist)
cmd = ei.easy_install(dist)
cmd.check_pth_processing = lambda: True
cmd.find_links = ['link1', 'link2']
cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
cmd.args = ['ok']
cmd.ensure_finalized()
keys = sorted(cmd.package_index.scanned_urls.keys())
self.assertEqual(keys, ['link1', 'link2'])
assert keys == ['link1', 'link2']
def test_write_exception(self):
"""
Test that `cant_write_to_target` is rendered as a DistutilsError.
"""
dist = Distribution()
cmd = ei.easy_install(dist)
cmd.install_dir = os.getcwd()
with pytest.raises(distutils.errors.DistutilsError):
cmd.cant_write_to_target()
class TestPTHFileWriter(unittest.TestCase):
class TestPTHFileWriter:
def test_add_from_cwd_site_sets_dirty(self):
'''a pth file manager should set dirty
if a distribution is in site but also the cwd
'''
pth = PthDistributions('does-not_exist', [os.getcwd()])
self.assertTrue(not pth.dirty)
assert not pth.dirty
pth.add(PRDistribution(os.getcwd()))
self.assertTrue(pth.dirty)
assert pth.dirty
def test_add_from_site_is_ignored(self):
if os.name != 'nt':
location = '/test/location/does-not-have-to-exist'
else:
location = 'c:\\does_not_exist'
location = '/test/location/does-not-have-to-exist'
# PthDistributions expects all locations to be normalized
location = pkg_resources.normalize_path(location)
pth = PthDistributions('does-not_exist', [location, ])
self.assertTrue(not pth.dirty)
assert not pth.dirty
pth.add(PRDistribution(location))
self.assertTrue(not pth.dirty)
assert not pth.dirty
class TestUserInstallTest(unittest.TestCase):
def setUp(self):
self.dir = tempfile.mkdtemp()
setup = os.path.join(self.dir, 'setup.py')
f = open(setup, 'w')
@pytest.yield_fixture
def setup_context(tmpdir):
with (tmpdir/'setup.py').open('w') as f:
f.write(SETUP_PY)
f.close()
self.old_cwd = os.getcwd()
os.chdir(self.dir)
with tmpdir.as_cwd():
yield tmpdir
self.old_enable_site = site.ENABLE_USER_SITE
self.old_file = easy_install_pkg.__file__
self.old_base = site.USER_BASE
site.USER_BASE = tempfile.mkdtemp()
self.old_site = site.USER_SITE
site.USER_SITE = tempfile.mkdtemp()
easy_install_pkg.__file__ = site.USER_SITE
def tearDown(self):
os.chdir(self.old_cwd)
shutil.rmtree(self.dir)
shutil.rmtree(site.USER_BASE)
shutil.rmtree(site.USER_SITE)
site.USER_BASE = self.old_base
site.USER_SITE = self.old_site
site.ENABLE_USER_SITE = self.old_enable_site
easy_install_pkg.__file__ = self.old_file
@pytest.mark.usefixtures("user_override")
@pytest.mark.usefixtures("setup_context")
class TestUserInstallTest:
@mock.patch('setuptools.command.easy_install.__file__', None)
def test_user_install_implied(self):
easy_install_pkg.__file__ = site.USER_SITE
site.ENABLE_USER_SITE = True # disabled sometimes
#XXX: replace with something meaningfull
dist = Distribution()
dist.script_name = 'setup.py'
cmd = easy_install(dist)
cmd = ei.easy_install(dist)
cmd.args = ['py']
cmd.ensure_finalized()
self.assertTrue(cmd.user, 'user should be implied')
assert cmd.user, 'user should be implied'
def test_multiproc_atexit(self):
try:
@ -178,10 +182,10 @@ class TestUserInstallTest(unittest.TestCase):
#XXX: replace with something meaningfull
dist = Distribution()
dist.script_name = 'setup.py'
cmd = easy_install(dist)
cmd = ei.easy_install(dist)
cmd.args = ['py']
cmd.initialize_options()
self.assertFalse(cmd.user, 'NOT user should be implied')
assert not cmd.user, 'NOT user should be implied'
def test_local_index(self):
# make sure the local index is used
@ -190,11 +194,8 @@ class TestUserInstallTest(unittest.TestCase):
new_location = tempfile.mkdtemp()
target = tempfile.mkdtemp()
egg_file = os.path.join(new_location, 'foo-1.0.egg-info')
f = open(egg_file, 'w')
try:
with open(egg_file, 'w') as f:
f.write('Name: foo\n')
finally:
f.close()
sys.path.append(target)
old_ppath = os.environ.get('PYTHONPATH')
@ -202,14 +203,15 @@ class TestUserInstallTest(unittest.TestCase):
try:
dist = Distribution()
dist.script_name = 'setup.py'
cmd = easy_install(dist)
cmd = ei.easy_install(dist)
cmd.install_dir = target
cmd.args = ['foo']
cmd.ensure_finalized()
cmd.local_index.scan([new_location])
res = cmd.easy_install('foo')
self.assertEqual(os.path.realpath(res.location),
os.path.realpath(new_location))
actual = os.path.normcase(os.path.realpath(res.location))
expected = os.path.normcase(os.path.realpath(new_location))
assert actual == expected
finally:
sys.path.remove(target)
for basedir in [new_location, target, ]:
@ -224,6 +226,25 @@ class TestUserInstallTest(unittest.TestCase):
else:
del os.environ['PYTHONPATH']
@contextlib.contextmanager
def user_install_setup_context(self, *args, **kwargs):
"""
Wrap sandbox.setup_context to patch easy_install in that context to
appear as user-installed.
"""
with self.orig_context(*args, **kwargs):
import setuptools.command.easy_install as ei
ei.__file__ = site.USER_SITE
yield
def patched_setup_context(self):
self.orig_context = sandbox.setup_context
return mock.patch(
'setuptools.sandbox.setup_context',
self.user_install_setup_context,
)
def test_setup_requires(self):
"""Regression test for Distribute issue #318
@ -232,18 +253,37 @@ class TestUserInstallTest(unittest.TestCase):
SandboxViolation.
"""
test_pkg = create_setup_requires_package(self.dir)
test_pkg = create_setup_requires_package(os.getcwd())
test_setup_py = os.path.join(test_pkg, 'setup.py')
try:
with quiet_context():
with reset_setup_stop_context():
with contexts.quiet():
with self.patched_setup_context():
run_setup(test_setup_py, ['install'])
except SandboxViolation:
self.fail('Installation caused SandboxViolation')
except IndexError:
# Test fails in some cases due to bugs in Python
# See https://bitbucket.org/pypa/setuptools/issue/201
pass
class TestSetupRequires(unittest.TestCase):
@pytest.yield_fixture
def distutils_package():
distutils_setup_py = SETUP_PY.replace(
'from setuptools import setup',
'from distutils.core import setup',
)
with contexts.tempdir(cd=os.chdir):
with open('setup.py', 'w') as f:
f.write(distutils_setup_py)
yield
class TestDistutilsPackage:
def test_bdist_egg_available_on_distutils_pkg(self, distutils_package):
run_setup('setup.py', ['bdist_egg'])
class TestSetupRequires:
def test_setup_requires_honors_fetch_params(self):
"""
@ -260,25 +300,27 @@ class TestSetupRequires(unittest.TestCase):
# Some platforms (Jython) don't find a port to which to bind,
# so skip this test for them.
return
with quiet_context():
with contexts.quiet():
# create an sdist that has a build-time dependency.
with TestSetupRequires.create_sdist() as dist_file:
with tempdir_context() as temp_install_dir:
with environment_context(PYTHONPATH=temp_install_dir):
ei_params = ['--index-url', p_index.url,
with contexts.tempdir() as temp_install_dir:
with contexts.environment(PYTHONPATH=temp_install_dir):
ei_params = [
'--index-url', p_index.url,
'--allow-hosts', p_index_loc,
'--exclude-scripts', '--install-dir', temp_install_dir,
dist_file]
with reset_setup_stop_context():
with argv_context(['easy_install']):
# attempt to install the dist. It should fail because
# it doesn't exist.
self.assertRaises(SystemExit,
easy_install_pkg.main, ei_params)
'--exclude-scripts',
'--install-dir', temp_install_dir,
dist_file,
]
with contexts.argv(['easy_install']):
# attempt to install the dist. It should fail because
# it doesn't exist.
with pytest.raises(SystemExit):
easy_install_pkg.main(ei_params)
# there should have been two or three requests to the server
# (three happens on Python 3.3a)
self.assertTrue(2 <= len(p_index.requests) <= 3)
self.assertEqual(p_index.requests[0].path, '/does-not-exist/')
assert 2 <= len(p_index.requests) <= 3
assert p_index.requests[0].path == '/does-not-exist/'
@staticmethod
@contextlib.contextmanager
@ -287,18 +329,17 @@ class TestSetupRequires(unittest.TestCase):
Return an sdist with a setup_requires dependency (of something that
doesn't exist)
"""
with tempdir_context() as dir:
with contexts.tempdir() as dir:
dist_path = os.path.join(dir, 'setuptools-test-fetcher-1.0.tar.gz')
make_trivial_sdist(
dist_path,
textwrap.dedent("""
import setuptools
setuptools.setup(
name="setuptools-test-fetcher",
version="1.0",
setup_requires = ['does-not-exist'],
)
""").lstrip())
script = DALS("""
import setuptools
setuptools.setup(
name="setuptools-test-fetcher",
version="1.0",
setup_requires = ['does-not-exist'],
)
""")
make_trivial_sdist(dist_path, script)
yield dist_path
def test_setup_requires_overrides_version_conflict(self):
@ -316,22 +357,17 @@ class TestSetupRequires(unittest.TestCase):
working_set.add(fake_dist)
try:
with tempdir_context() as temp_dir:
with contexts.tempdir() as temp_dir:
test_pkg = create_setup_requires_package(temp_dir)
test_setup_py = os.path.join(test_pkg, 'setup.py')
with quiet_context() as (stdout, stderr):
with reset_setup_stop_context():
try:
# Don't even need to install the package, just
# running the setup.py at all is sufficient
run_setup(test_setup_py, ['--name'])
except VersionConflict:
self.fail('Installing setup.py requirements '
'caused a VersionConflict')
with contexts.quiet() as (stdout, stderr):
# Don't even need to install the package, just
# running the setup.py at all is sufficient
run_setup(test_setup_py, ['--name'])
lines = stdout.readlines()
self.assertTrue(len(lines) > 0)
self.assertTrue(lines[-1].strip(), 'test_pkg')
assert len(lines) > 0
assert lines[-1].strip(), 'test_pkg'
finally:
pkg_resources.__setstate__(pr_state)
@ -352,17 +388,16 @@ def create_setup_requires_package(path):
test_setup_py = os.path.join(test_pkg, 'setup.py')
os.mkdir(test_pkg)
f = open(test_setup_py, 'w')
f.write(textwrap.dedent("""\
import setuptools
setuptools.setup(**%r)
""" % test_setup_attrs))
f.close()
with open(test_setup_py, 'w') as f:
f.write(DALS("""
import setuptools
setuptools.setup(**%r)
""" % test_setup_attrs))
foobar_path = os.path.join(path, 'foobar-0.1.tar.gz')
make_trivial_sdist(
foobar_path,
textwrap.dedent("""\
DALS("""
import setuptools
setuptools.setup(
name='foobar',
@ -386,71 +421,127 @@ def make_trivial_sdist(dist_path, setup_py):
MemFile = StringIO
setup_py_bytes = MemFile(setup_py.encode('utf-8'))
setup_py_file.size = len(setup_py_bytes.getvalue())
dist = tarfile.open(dist_path, 'w:gz')
try:
with tarfile_open(dist_path, 'w:gz') as dist:
dist.addfile(setup_py_file, fileobj=setup_py_bytes)
finally:
dist.close()
@contextlib.contextmanager
def tempdir_context(cd=lambda dir:None):
temp_dir = tempfile.mkdtemp()
orig_dir = os.getcwd()
try:
cd(temp_dir)
yield temp_dir
finally:
cd(orig_dir)
shutil.rmtree(temp_dir)
class TestScriptHeader:
non_ascii_exe = '/Users/José/bin/python'
exe_with_spaces = r'C:\Program Files\Python33\python.exe'
@contextlib.contextmanager
def environment_context(**updates):
old_env = os.environ.copy()
os.environ.update(updates)
try:
yield
finally:
for key in updates:
del os.environ[key]
os.environ.update(old_env)
@pytest.mark.skipif(
sys.platform.startswith('java') and ei.is_sh(sys.executable),
reason="Test cannot run under java when executable is sh"
)
def test_get_script_header(self):
expected = '#!%s\n' % ei.nt_quote_arg(os.path.normpath(sys.executable))
actual = ei.ScriptWriter.get_script_header('#!/usr/local/bin/python')
assert actual == expected
@contextlib.contextmanager
def argv_context(repl):
old_argv = sys.argv[:]
sys.argv[:] = repl
yield
sys.argv[:] = old_argv
expected = '#!%s -x\n' % ei.nt_quote_arg(os.path.normpath
(sys.executable))
actual = ei.ScriptWriter.get_script_header('#!/usr/bin/python -x')
assert actual == expected
@contextlib.contextmanager
def reset_setup_stop_context():
"""
When the setuptools tests are run using setup.py test, and then
one wants to invoke another setup() command (such as easy_install)
within those tests, it's necessary to reset the global variable
in distutils.core so that the setup() command will run naturally.
"""
setup_stop_after = distutils.core._setup_stop_after
distutils.core._setup_stop_after = None
yield
distutils.core._setup_stop_after = setup_stop_after
actual = ei.ScriptWriter.get_script_header('#!/usr/bin/python',
executable=self.non_ascii_exe)
expected = '#!%s -x\n' % self.non_ascii_exe
assert actual == expected
actual = ei.ScriptWriter.get_script_header('#!/usr/bin/python',
executable='"'+self.exe_with_spaces+'"')
expected = '#!"%s"\n' % self.exe_with_spaces
assert actual == expected
@pytest.mark.xfail(
compat.PY3 and os.environ.get("LC_CTYPE") in ("C", "POSIX"),
reason="Test fails in this locale on Python 3"
)
@mock.patch.dict(sys.modules, java=mock.Mock(lang=mock.Mock(System=
mock.Mock(getProperty=mock.Mock(return_value="")))))
@mock.patch('sys.platform', 'java1.5.0_13')
def test_get_script_header_jython_workaround(self, tmpdir):
# Create a mock sys.executable that uses a shebang line
header = DALS("""
#!/usr/bin/python
# -*- coding: utf-8 -*-
""")
exe = tmpdir / 'exe.py'
with exe.open('w') as f:
f.write(header)
exe = str(exe)
header = ei.ScriptWriter.get_script_header('#!/usr/local/bin/python',
executable=exe)
assert header == '#!/usr/bin/env %s\n' % exe
expect_out = 'stdout' if sys.version_info < (2,7) else 'stderr'
with contexts.quiet() as (stdout, stderr):
# When options are included, generate a broken shebang line
# with a warning emitted
candidate = ei.ScriptWriter.get_script_header('#!/usr/bin/python -x',
executable=exe)
assert candidate == '#!%s -x\n' % exe
output = locals()[expect_out]
assert 'Unable to adapt shebang line' in output.getvalue()
with contexts.quiet() as (stdout, stderr):
candidate = ei.ScriptWriter.get_script_header('#!/usr/bin/python',
executable=self.non_ascii_exe)
assert candidate == '#!%s -x\n' % self.non_ascii_exe
output = locals()[expect_out]
assert 'Unable to adapt shebang line' in output.getvalue()
@contextlib.contextmanager
def quiet_context():
"""
Redirect stdout/stderr to StringIO objects to prevent console output from
distutils commands.
"""
class TestCommandSpec:
def test_custom_launch_command(self):
"""
Show how a custom CommandSpec could be used to specify a #! executable
which takes parameters.
"""
cmd = ei.CommandSpec(['/usr/bin/env', 'python3'])
assert cmd.as_header() == '#!/usr/bin/env python3\n'
old_stdout = sys.stdout
old_stderr = sys.stderr
new_stdout = sys.stdout = StringIO()
new_stderr = sys.stderr = StringIO()
try:
yield new_stdout, new_stderr
finally:
new_stdout.seek(0)
new_stderr.seek(0)
sys.stdout = old_stdout
sys.stderr = old_stderr
def test_from_param_for_CommandSpec_is_passthrough(self):
"""
from_param should return an instance of a CommandSpec
"""
cmd = ei.CommandSpec(['python'])
cmd_new = ei.CommandSpec.from_param(cmd)
assert cmd is cmd_new
def test_from_environment_with_spaces_in_executable(self):
with mock.patch('sys.executable', TestScriptHeader.exe_with_spaces):
cmd = ei.CommandSpec.from_environment()
assert len(cmd) == 1
assert cmd.as_header().startswith('#!"')
def test_from_simple_string_uses_shlex(self):
"""
In order to support `executable = /usr/bin/env my-python`, make sure
from_param invokes shlex on that input.
"""
cmd = ei.CommandSpec.from_param('/usr/bin/env my-python')
assert len(cmd) == 2
assert '"' not in cmd.as_header()
def test_sys_executable(self):
"""
CommandSpec.from_string(sys.executable) should contain just that param.
"""
writer = ei.ScriptWriter.best()
cmd = writer.command_spec_class.from_string(sys.executable)
assert len(cmd) == 1
assert cmd[0] == sys.executable
class TestWindowsScriptWriter:
def test_header(self):
hdr = ei.WindowsScriptWriter.get_script_header('')
assert hdr.startswith('#!')
assert hdr.endswith('\n')
hdr = hdr.lstrip('#!')
hdr = hdr.rstrip('\n')
# header should not start with an escaped quote
assert not hdr.startswith('\\"')
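The new CommandSpec/ScriptWriter API exercised above can be poked at interactively; the calls and expected values below are taken directly from the tests (this assumes the vendored setuptools 12.x is importable):

import sys
import setuptools.command.easy_install as ei

# A custom launch command with parameters becomes the #! header verbatim.
cmd = ei.CommandSpec(['/usr/bin/env', 'python3'])
assert cmd.as_header() == '#!/usr/bin/env python3\n'

# A plain string is split with shlex, so multi-word executables round-trip.
cmd = ei.CommandSpec.from_param('/usr/bin/env my-python')
assert len(cmd) == 2

# from_string(sys.executable) keeps the interpreter as a single parameter.
writer = ei.ScriptWriter.best()
cmd = writer.command_spec_class.from_string(sys.executable)
assert len(cmd) == 1 and cmd[0] == sys.executable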

View File

@ -1,173 +1,98 @@
import os
import sys
import tempfile
import shutil
import unittest
import stat
import pkg_resources
import warnings
from setuptools.command import egg_info
from setuptools import svn_utils
from setuptools.tests import environment, test_svn
from setuptools.tests.py26compat import skipIf
import pytest
ENTRIES_V10 = pkg_resources.resource_string(__name__, 'entries-v10')
"An entries file generated with svn 1.6.17 against the legacy Setuptools repo"
from . import environment
from .textwrap import DALS
from . import contexts
class TestEggInfo(unittest.TestCase):
class TestEggInfo:
def setUp(self):
self.test_dir = tempfile.mkdtemp()
os.mkdir(os.path.join(self.test_dir, '.svn'))
setup_script = DALS("""
from setuptools import setup
self.old_cwd = os.getcwd()
os.chdir(self.test_dir)
setup(
name='foo',
py_modules=['hello'],
entry_points={'console_scripts': ['hi = hello.run']},
zip_safe=False,
)
""")
def tearDown(self):
os.chdir(self.old_cwd)
shutil.rmtree(self.test_dir)
def _create_project(self):
with open('setup.py', 'w') as f:
f.write(self.setup_script)
def _write_entries(self, entries):
fn = os.path.join(self.test_dir, '.svn', 'entries')
entries_f = open(fn, 'wb')
entries_f.write(entries)
entries_f.close()
@skipIf(not test_svn._svn_check, "No SVN to text, in the first place")
def test_version_10_format(self):
"""
"""
#keeping this set for 1.6 is a good check on the get_svn_revision
#to ensure I return using svnversion what would had been returned
version_str = svn_utils.SvnInfo.get_svn_version()
version = [int(x) for x in version_str.split('.')[:2]]
if version != [1, 6]:
if hasattr(self, 'skipTest'):
self.skipTest('')
else:
sys.stderr.write('\n Skipping due to SVN Version\n')
return
with open('hello.py', 'w') as f:
f.write(DALS("""
def run():
print('hello')
"""))
self._write_entries(ENTRIES_V10)
rev = egg_info.egg_info.get_svn_revision()
self.assertEqual(rev, '89000')
@pytest.yield_fixture
def env(self):
class Environment(str): pass
def test_version_10_format_legacy_parser(self):
"""
"""
path_variable = None
for env in os.environ:
if env.lower() == 'path':
path_variable = env
with contexts.tempdir(prefix='setuptools-test.') as env_dir:
env = Environment(env_dir)
os.chmod(env_dir, stat.S_IRWXU)
subs = 'home', 'lib', 'scripts', 'data', 'egg-base'
env.paths = dict(
(dirname, os.path.join(env_dir, dirname))
for dirname in subs
)
list(map(os.mkdir, env.paths.values()))
config = os.path.join(env.paths['home'], '.pydistutils.cfg')
with open(config, 'w') as f:
f.write(DALS("""
[egg_info]
egg-base = %(egg-base)s
""" % env.paths
))
yield env
if path_variable:
old_path = os.environ[path_variable]
os.environ[path_variable] = ''
#catch_warnings not available until py26
warning_filters = warnings.filters
warnings.filters = warning_filters[:]
try:
warnings.simplefilter("ignore", DeprecationWarning)
self._write_entries(ENTRIES_V10)
rev = egg_info.egg_info.get_svn_revision()
finally:
#restore the warning filters
warnings.filters = warning_filters
#restore the os path
if path_variable:
os.environ[path_variable] = old_path
def test_egg_base_installed_egg_info(self, tmpdir_cwd, env):
self._create_project()
self.assertEqual(rev, '89000')
DUMMY_SOURCE_TXT = """CHANGES.txt
CONTRIBUTORS.txt
HISTORY.txt
LICENSE
MANIFEST.in
README.txt
setup.py
dummy/__init__.py
dummy/test.txt
dummy.egg-info/PKG-INFO
dummy.egg-info/SOURCES.txt
dummy.egg-info/dependency_links.txt
dummy.egg-info/top_level.txt"""
class TestSvnDummy(environment.ZippedEnvironment):
def setUp(self):
version = svn_utils.SvnInfo.get_svn_version()
if not version: # None or Empty
return None
self.base_version = tuple([int(x) for x in version.split('.')][:2])
if not self.base_version:
raise ValueError('No SVN tools installed')
elif self.base_version < (1, 3):
raise ValueError('Insufficient SVN Version %s' % version)
elif self.base_version >= (1, 9):
#trying the latest version
self.base_version = (1, 8)
self.dataname = "dummy%i%i" % self.base_version
self.datafile = os.path.join('setuptools', 'tests',
'svn_data', self.dataname + ".zip")
super(TestSvnDummy, self).setUp()
@skipIf(not test_svn._svn_check, "No SVN to text, in the first place")
def test_sources(self):
code, data = environment.run_setup_py(["sdist"],
pypath=self.old_cwd,
data_stream=1)
environ = os.environ.copy().update(
HOME=env.paths['home'],
)
cmd = [
'install',
'--home', env.paths['home'],
'--install-lib', env.paths['lib'],
'--install-scripts', env.paths['scripts'],
'--install-data', env.paths['data'],
]
code, data = environment.run_setup_py(
cmd=cmd,
pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]),
data_stream=1,
env=environ,
)
if code:
raise AssertionError(data)
sources = os.path.join('dummy.egg-info', 'SOURCES.txt')
infile = open(sources, 'r')
try:
read_contents = infile.read()
finally:
infile.close()
del infile
actual = self._find_egg_info_files(env.paths['lib'])
self.assertEqual(DUMMY_SOURCE_TXT, read_contents)
expected = [
'PKG-INFO',
'SOURCES.txt',
'dependency_links.txt',
'entry_points.txt',
'not-zip-safe',
'top_level.txt',
]
assert sorted(actual) == expected
return data
class TestSvnDummyLegacy(environment.ZippedEnvironment):
def setUp(self):
self.base_version = (1, 6)
self.dataname = "dummy%i%i" % self.base_version
self.datafile = os.path.join('setuptools', 'tests',
'svn_data', self.dataname + ".zip")
super(TestSvnDummyLegacy, self).setUp()
def test_sources(self):
code, data = environment.run_setup_py(["sdist"],
pypath=self.old_cwd,
path="",
data_stream=1)
if code:
raise AssertionError(data)
sources = os.path.join('dummy.egg-info', 'SOURCES.txt')
infile = open(sources, 'r')
try:
read_contents = infile.read()
finally:
infile.close()
del infile
self.assertEqual(DUMMY_SOURCE_TXT, read_contents)
return data
def test_suite():
return unittest.defaultTestLoader.loadTestsFromName(__name__)
def _find_egg_info_files(self, root):
results = (
filenames
for dirpath, dirnames, filenames in os.walk(root)
if os.path.basename(dirpath) == 'EGG-INFO'
)
# expect exactly one result
result, = results
return result
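The env fixture above leans on a small trick: the fixture value is a str subclass, so it can be passed anywhere a path string is expected while still carrying a .paths mapping of the directories it created. Standalone sketch (the temporary directory is not cleaned up here; the real fixture uses contexts.tempdir for that):

import os
import tempfile

class Environment(str):
    pass

env_dir = tempfile.mkdtemp(prefix='setuptools-test.')
env = Environment(env_dir)
env.paths = dict(
    (name, os.path.join(env_dir, name))
    for name in ('home', 'lib', 'scripts', 'data', 'egg-base')
)
list(map(os.mkdir, env.paths.values()))
assert os.path.isdir(env)   # usable directly as a path
assert sorted(env.paths) == ['data', 'egg-base', 'home', 'lib', 'scripts']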

View File

@ -0,0 +1,170 @@
"""Tests for setuptools.find_packages()."""
import os
import sys
import shutil
import tempfile
import platform
import pytest
import setuptools
from setuptools import find_packages
find_420_packages = setuptools.PEP420PackageFinder.find
# modeled after CPython's test.support.can_symlink
def can_symlink():
TESTFN = tempfile.mktemp()
symlink_path = TESTFN + "can_symlink"
try:
os.symlink(TESTFN, symlink_path)
can = True
except (OSError, NotImplementedError, AttributeError):
can = False
else:
os.remove(symlink_path)
globals().update(can_symlink=lambda: can)
return can
def has_symlink():
bad_symlink = (
# Windows symlink directory detection is broken on Python 3.2
platform.system() == 'Windows' and sys.version_info[:2] == (3,2)
)
return can_symlink() and not bad_symlink
class TestFindPackages:
def setup_method(self, method):
self.dist_dir = tempfile.mkdtemp()
self._make_pkg_structure()
def teardown_method(self, method):
shutil.rmtree(self.dist_dir)
def _make_pkg_structure(self):
"""Make basic package structure.
dist/
docs/
conf.py
pkg/
__pycache__/
nspkg/
mod.py
subpkg/
assets/
asset
__init__.py
setup.py
"""
self.docs_dir = self._mkdir('docs', self.dist_dir)
self._touch('conf.py', self.docs_dir)
self.pkg_dir = self._mkdir('pkg', self.dist_dir)
self._mkdir('__pycache__', self.pkg_dir)
self.ns_pkg_dir = self._mkdir('nspkg', self.pkg_dir)
self._touch('mod.py', self.ns_pkg_dir)
self.sub_pkg_dir = self._mkdir('subpkg', self.pkg_dir)
self.asset_dir = self._mkdir('assets', self.sub_pkg_dir)
self._touch('asset', self.asset_dir)
self._touch('__init__.py', self.sub_pkg_dir)
self._touch('setup.py', self.dist_dir)
def _mkdir(self, path, parent_dir=None):
if parent_dir:
path = os.path.join(parent_dir, path)
os.mkdir(path)
return path
def _touch(self, path, dir_=None):
if dir_:
path = os.path.join(dir_, path)
fp = open(path, 'w')
fp.close()
return path
def test_regular_package(self):
self._touch('__init__.py', self.pkg_dir)
packages = find_packages(self.dist_dir)
assert packages == ['pkg', 'pkg.subpkg']
def test_exclude(self):
self._touch('__init__.py', self.pkg_dir)
packages = find_packages(self.dist_dir, exclude=('pkg.*',))
assert packages == ['pkg']
def test_include_excludes_other(self):
"""
If include is specified, other packages should be excluded.
"""
self._touch('__init__.py', self.pkg_dir)
alt_dir = self._mkdir('other_pkg', self.dist_dir)
self._touch('__init__.py', alt_dir)
packages = find_packages(self.dist_dir, include=['other_pkg'])
assert packages == ['other_pkg']
def test_dir_with_dot_is_skipped(self):
shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets'))
data_dir = self._mkdir('some.data', self.pkg_dir)
self._touch('__init__.py', data_dir)
self._touch('file.dat', data_dir)
packages = find_packages(self.dist_dir)
assert 'pkg.some.data' not in packages
def test_dir_with_packages_in_subdir_is_excluded(self):
"""
Ensure that a package in a non-package such as build/pkg/__init__.py
is excluded.
"""
build_dir = self._mkdir('build', self.dist_dir)
build_pkg_dir = self._mkdir('pkg', build_dir)
self._touch('__init__.py', build_pkg_dir)
packages = find_packages(self.dist_dir)
assert 'build.pkg' not in packages
@pytest.mark.skipif(not has_symlink(), reason='Symlink support required')
def test_symlinked_packages_are_included(self):
"""
A symbolically-linked directory should be treated like any other
directory when matched as a package.
Create a link from lpkg -> pkg.
"""
self._touch('__init__.py', self.pkg_dir)
linked_pkg = os.path.join(self.dist_dir, 'lpkg')
os.symlink('pkg', linked_pkg)
assert os.path.isdir(linked_pkg)
packages = find_packages(self.dist_dir)
assert 'lpkg' in packages
def _assert_packages(self, actual, expected):
assert set(actual) == set(expected)
def test_pep420_ns_package(self):
packages = find_420_packages(
self.dist_dir, include=['pkg*'], exclude=['pkg.subpkg.assets'])
self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])
def test_pep420_ns_package_no_includes(self):
packages = find_420_packages(
self.dist_dir, exclude=['pkg.subpkg.assets'])
self._assert_packages(packages, ['docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg'])
def test_pep420_ns_package_no_includes_or_excludes(self):
packages = find_420_packages(self.dist_dir)
expected = [
'docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg', 'pkg.subpkg.assets']
self._assert_packages(packages, expected)
def test_regular_package_with_nested_pep420_ns_packages(self):
self._touch('__init__.py', self.pkg_dir)
packages = find_420_packages(
self.dist_dir, exclude=['docs', 'pkg.subpkg.assets'])
self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])
def test_pep420_ns_package_no_non_package_dirs(self):
shutil.rmtree(self.docs_dir)
shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets'))
packages = find_420_packages(self.dist_dir)
self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])
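The two finders compared throughout this new file differ only in how they treat directories without __init__.py; a minimal demonstration on a throwaway layout (names invented for the example):

import os
import shutil
import tempfile
from setuptools import find_packages, PEP420PackageFinder

root = tempfile.mkdtemp()
os.makedirs(os.path.join(root, 'pkg', 'nspkg'))
open(os.path.join(root, 'pkg', '__init__.py'), 'w').close()
open(os.path.join(root, 'pkg', 'nspkg', 'mod.py'), 'w').close()

# find_packages() requires __init__.py; PEP420PackageFinder does not.
assert sorted(find_packages(root)) == ['pkg']
assert sorted(PEP420PackageFinder.find(root)) == ['pkg', 'pkg.nspkg']
shutil.rmtree(root)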

View File

@ -0,0 +1,99 @@
"""Run some integration tests.
Try to install a few packages.
"""
import glob
import os
import sys
import pytest
from setuptools.command.easy_install import easy_install
from setuptools.command import easy_install as easy_install_pkg
from setuptools.dist import Distribution
from setuptools.compat import urlopen
def setup_module(module):
packages = 'stevedore', 'virtualenvwrapper', 'pbr', 'novaclient'
for pkg in packages:
try:
__import__(pkg)
tmpl = "Integration tests cannot run when {pkg} is installed"
pytest.skip(tmpl.format(**locals()))
except ImportError:
pass
try:
urlopen('https://pypi.python.org/pypi')
except Exception as exc:
pytest.skip(reason=str(exc))
@pytest.fixture
def install_context(request, tmpdir, monkeypatch):
"""Fixture to set up temporary installation directory.
"""
# Save old values so we can restore them.
new_cwd = tmpdir.mkdir('cwd')
user_base = tmpdir.mkdir('user_base')
user_site = tmpdir.mkdir('user_site')
install_dir = tmpdir.mkdir('install_dir')
def fin():
# undo the monkeypatch, particularly needed under
# windows because of kept handle on cwd
monkeypatch.undo()
new_cwd.remove()
user_base.remove()
user_site.remove()
install_dir.remove()
request.addfinalizer(fin)
# Change the environment and site settings to control where the
# files are installed and ensure we do not overwrite anything.
monkeypatch.chdir(new_cwd)
monkeypatch.setattr(easy_install_pkg, '__file__', user_site.strpath)
monkeypatch.setattr('site.USER_BASE', user_base.strpath)
monkeypatch.setattr('site.USER_SITE', user_site.strpath)
monkeypatch.setattr('sys.path', sys.path + [install_dir.strpath])
monkeypatch.setenv('PYTHONPATH', os.path.pathsep.join(sys.path))
# Set up the command for performing the installation.
dist = Distribution()
cmd = easy_install(dist)
cmd.install_dir = install_dir.strpath
return cmd
def _install_one(requirement, cmd, pkgname, modulename):
cmd.args = [requirement]
cmd.ensure_finalized()
cmd.run()
target = cmd.install_dir
dest_path = glob.glob(os.path.join(target, pkgname + '*.egg'))
assert dest_path
assert os.path.exists(os.path.join(dest_path[0], pkgname, modulename))
def test_stevedore(install_context):
_install_one('stevedore', install_context,
'stevedore', 'extension.py')
@pytest.mark.xfail
def test_virtualenvwrapper(install_context):
_install_one('virtualenvwrapper', install_context,
'virtualenvwrapper', 'hook_loader.py')
def test_pbr(install_context):
_install_one('pbr', install_context,
'pbr', 'core.py')
@pytest.mark.xfail
def test_python_novaclient(install_context):
_install_one('python-novaclient', install_context,
'novaclient', 'base.py')
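install_context is built almost entirely from pytest's monkeypatch fixture, whose changes are rolled back automatically when the test ends; a stripped-down illustration (the test name is made up):

import os
import sys

def test_monkeypatch_isolation(monkeypatch, tmpdir):
    monkeypatch.chdir(tmpdir)                      # cwd restored afterwards
    monkeypatch.setenv('PYTHONPATH', str(tmpdir))  # env var restored afterwards
    monkeypatch.setattr('sys.path', sys.path + [str(tmpdir)])
    assert str(tmpdir) in sys.path
    assert os.environ['PYTHONPATH'] == str(tmpdir)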

View File

@ -1,48 +1,43 @@
import os
import unittest
from setuptools.tests.py26compat import skipIf
try:
import ast
except ImportError:
pass
import pytest
class TestMarkerlib(unittest.TestCase):
@skipIf('ast' not in globals(),
"ast not available (Python < 2.6?)")
class TestMarkerlib:
@pytest.mark.importorskip('ast')
def test_markers(self):
from _markerlib import interpret, default_environment, compile
os_name = os.name
self.assertTrue(interpret(""))
self.assertTrue(interpret("os.name != 'buuuu'"))
self.assertTrue(interpret("os_name != 'buuuu'"))
self.assertTrue(interpret("python_version > '1.0'"))
self.assertTrue(interpret("python_version < '5.0'"))
self.assertTrue(interpret("python_version <= '5.0'"))
self.assertTrue(interpret("python_version >= '1.0'"))
self.assertTrue(interpret("'%s' in os.name" % os_name))
self.assertTrue(interpret("'%s' in os_name" % os_name))
self.assertTrue(interpret("'buuuu' not in os.name"))
self.assertFalse(interpret("os.name == 'buuuu'"))
self.assertFalse(interpret("os_name == 'buuuu'"))
self.assertFalse(interpret("python_version < '1.0'"))
self.assertFalse(interpret("python_version > '5.0'"))
self.assertFalse(interpret("python_version >= '5.0'"))
self.assertFalse(interpret("python_version <= '1.0'"))
self.assertFalse(interpret("'%s' not in os.name" % os_name))
self.assertFalse(interpret("'buuuu' in os.name and python_version >= '5.0'"))
self.assertFalse(interpret("'buuuu' in os_name and python_version >= '5.0'"))
assert interpret("")
assert interpret("os.name != 'buuuu'")
assert interpret("os_name != 'buuuu'")
assert interpret("python_version > '1.0'")
assert interpret("python_version < '5.0'")
assert interpret("python_version <= '5.0'")
assert interpret("python_version >= '1.0'")
assert interpret("'%s' in os.name" % os_name)
assert interpret("'%s' in os_name" % os_name)
assert interpret("'buuuu' not in os.name")
assert not interpret("os.name == 'buuuu'")
assert not interpret("os_name == 'buuuu'")
assert not interpret("python_version < '1.0'")
assert not interpret("python_version > '5.0'")
assert not interpret("python_version >= '5.0'")
assert not interpret("python_version <= '1.0'")
assert not interpret("'%s' not in os.name" % os_name)
assert not interpret("'buuuu' in os.name and python_version >= '5.0'")
assert not interpret("'buuuu' in os_name and python_version >= '5.0'")
environment = default_environment()
environment['extra'] = 'test'
self.assertTrue(interpret("extra == 'test'", environment))
self.assertFalse(interpret("extra == 'doc'", environment))
assert interpret("extra == 'test'", environment)
assert not interpret("extra == 'doc'", environment)
def raises_nameError():
try:
interpret("python.version == '42'")
@ -50,9 +45,9 @@ class TestMarkerlib(unittest.TestCase):
pass
else:
raise Exception("Expected NameError")
raises_nameError()
def raises_syntaxError():
try:
interpret("(x for x in (4,))")
@ -60,9 +55,9 @@ class TestMarkerlib(unittest.TestCase):
pass
else:
raise Exception("Expected SyntaxError")
raises_syntaxError()
statement = "python_version == '5'"
self.assertEqual(compile(statement).__doc__, statement)
assert compile(statement).__doc__ == statement
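The same marker evaluation can be reproduced in a few lines; interpret() and default_environment() come from the vendored _markerlib package, exactly as imported in the test above:

from _markerlib import interpret, default_environment

assert interpret("python_version >= '1.0'")
assert not interpret("os.name == 'buuuu'")

env = default_environment()
env['extra'] = 'test'   # markers may reference the extra being installed
assert interpret("extra == 'test'", env)
assert not interpret("extra == 'doc'", env)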

View File

@ -0,0 +1,179 @@
"""
Tests for msvc9compiler.
"""
import os
import contextlib
import distutils.errors
import pytest
try:
from unittest import mock
except ImportError:
import mock
from . import contexts
# importing only setuptools should apply the patch
__import__('setuptools')
pytest.importorskip("distutils.msvc9compiler")
def mock_reg(hkcu=None, hklm=None):
"""
Return a mock for distutils.msvc9compiler.Reg, patched
to mock out the functions that access the registry.
"""
_winreg = getattr(distutils.msvc9compiler, '_winreg', None)
winreg = getattr(distutils.msvc9compiler, 'winreg', _winreg)
hives = {
winreg.HKEY_CURRENT_USER: hkcu or {},
winreg.HKEY_LOCAL_MACHINE: hklm or {},
}
@classmethod
def read_keys(cls, base, key):
"""Return list of registry keys."""
hive = hives.get(base, {})
return [
k.rpartition('\\')[2]
for k in hive if k.startswith(key.lower())
]
@classmethod
def read_values(cls, base, key):
"""Return dict of registry keys and values."""
hive = hives.get(base, {})
return dict(
(k.rpartition('\\')[2], hive[k])
for k in hive if k.startswith(key.lower())
)
return mock.patch.multiple(distutils.msvc9compiler.Reg,
read_keys=read_keys, read_values=read_values)
class TestModulePatch:
"""
Ensure that importing setuptools is sufficient to replace
the standard find_vcvarsall function with a version that
recognizes the "Visual C++ for Python" package.
"""
key_32 = r'software\microsoft\devdiv\vcforpython\9.0\installdir'
key_64 = r'software\wow6432node\microsoft\devdiv\vcforpython\9.0\installdir'
def test_patched(self):
"Test the module is actually patched"
mod_name = distutils.msvc9compiler.find_vcvarsall.__module__
assert mod_name == "setuptools.msvc9_support", "find_vcvarsall unpatched"
def test_no_registry_entryies_means_nothing_found(self):
"""
No registry entries or environment variable should lead to an error
directing the user to download vcpython27.
"""
find_vcvarsall = distutils.msvc9compiler.find_vcvarsall
query_vcvarsall = distutils.msvc9compiler.query_vcvarsall
with contexts.environment(VS90COMNTOOLS=None):
with mock_reg():
assert find_vcvarsall(9.0) is None
expected = distutils.errors.DistutilsPlatformError
with pytest.raises(expected) as exc:
query_vcvarsall(9.0)
assert 'aka.ms/vcpython27' in str(exc)
@pytest.yield_fixture
def user_preferred_setting(self):
"""
Set up environment with different install dirs for user vs. system
and yield the user_install_dir for the expected result.
"""
with self.mock_install_dir() as user_install_dir:
with self.mock_install_dir() as system_install_dir:
reg = mock_reg(
hkcu={
self.key_32: user_install_dir,
},
hklm={
self.key_32: system_install_dir,
self.key_64: system_install_dir,
},
)
with reg:
yield user_install_dir
def test_prefer_current_user(self, user_preferred_setting):
"""
Ensure user's settings are preferred.
"""
result = distutils.msvc9compiler.find_vcvarsall(9.0)
expected = os.path.join(user_preferred_setting, 'vcvarsall.bat')
assert expected == result
@pytest.yield_fixture
def local_machine_setting(self):
"""
Set up environment with only the system environment configured.
"""
with self.mock_install_dir() as system_install_dir:
reg = mock_reg(
hklm={
self.key_32: system_install_dir,
},
)
with reg:
yield system_install_dir
def test_local_machine_recognized(self, local_machine_setting):
"""
Ensure machine setting is honored if user settings are not present.
"""
result = distutils.msvc9compiler.find_vcvarsall(9.0)
expected = os.path.join(local_machine_setting, 'vcvarsall.bat')
assert expected == result
@pytest.yield_fixture
def x64_preferred_setting(self):
"""
Set up environment with 64-bit and 32-bit system settings configured
and yield the canonical location.
"""
with self.mock_install_dir() as x32_dir:
with self.mock_install_dir() as x64_dir:
reg = mock_reg(
hklm={
# This *should* only exist on 32-bit machines
self.key_32: x32_dir,
# This *should* only exist on 64-bit machines
self.key_64: x64_dir,
},
)
with reg:
yield x32_dir
def test_ensure_64_bit_preferred(self, x64_preferred_setting):
"""
Ensure 64-bit system key is preferred.
"""
result = distutils.msvc9compiler.find_vcvarsall(9.0)
expected = os.path.join(x64_preferred_setting, 'vcvarsall.bat')
assert expected == result
@staticmethod
@contextlib.contextmanager
def mock_install_dir():
"""
Make a mock install dir in a unique location so that tests can
distinguish which dir was detected in a given scenario.
"""
with contexts.tempdir() as result:
vcvarsall = os.path.join(result, 'vcvarsall.bat')
with open(vcvarsall, 'w'):
pass
yield result
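mock_reg() above is built on mock.patch.multiple, which swaps several attributes on one target for the duration of a with block and restores them afterwards; the Reg class below is a stand-in for distutils.msvc9compiler.Reg, used only to show the mechanic:

try:
    from unittest import mock   # Python 3.3+
except ImportError:
    import mock                 # the backport on older Pythons

class Reg(object):
    @classmethod
    def read_keys(cls, base, key):
        raise RuntimeError('would touch the real registry')

fake_read_keys = classmethod(lambda cls, base, key: ['installdir'])

with mock.patch.multiple(Reg, read_keys=fake_read_keys):
    assert Reg.read_keys(None, r'software\microsoft') == ['installdir']
# outside the block the original (raising) classmethod is back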

View File

@ -1,26 +1,24 @@
"""Package Index Tests
"""
import sys
import os
import unittest
import pkg_resources
from setuptools.compat import urllib2, httplib, HTTPError, unicode, pathname2url
import distutils.errors
from setuptools.compat import httplib, HTTPError, unicode, pathname2url
import pkg_resources
import setuptools.package_index
from setuptools.tests.server import IndexServer
class TestPackageIndex(unittest.TestCase):
class TestPackageIndex:
def test_bad_url_bad_port(self):
index = setuptools.package_index.PackageIndex()
url = 'http://127.0.0.1:0/nonesuch/test_package_index'
try:
v = index.open_url(url)
except Exception:
v = sys.exc_info()[1]
self.assertTrue(url in str(v))
except Exception as v:
assert url in str(v)
else:
self.assertTrue(isinstance(v, HTTPError))
assert isinstance(v, HTTPError)
def test_bad_url_typo(self):
# issue 16
@ -33,11 +31,10 @@ class TestPackageIndex(unittest.TestCase):
url = 'url:%20https://svn.plone.org/svn/collective/inquant.contentmirror.plone/trunk'
try:
v = index.open_url(url)
except Exception:
v = sys.exc_info()[1]
self.assertTrue(url in str(v))
except Exception as v:
assert url in str(v)
else:
self.assertTrue(isinstance(v, HTTPError))
assert isinstance(v, HTTPError)
def test_bad_url_bad_status_line(self):
index = setuptools.package_index.PackageIndex(
@ -51,9 +48,8 @@ class TestPackageIndex(unittest.TestCase):
url = 'http://example.com'
try:
v = index.open_url(url)
except Exception:
v = sys.exc_info()[1]
self.assertTrue('line' in str(v))
except Exception as v:
assert 'line' in str(v)
else:
raise AssertionError('Should have raise here!')
@ -69,8 +65,7 @@ class TestPackageIndex(unittest.TestCase):
url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk'
try:
index.open_url(url)
except distutils.errors.DistutilsError:
error = sys.exc_info()[1]
except distutils.errors.DistutilsError as error:
msg = unicode(error)
assert 'nonnumeric port' in msg or 'getaddrinfo failed' in msg or 'Name or service not known' in msg
return
@ -94,7 +89,7 @@ class TestPackageIndex(unittest.TestCase):
hosts=('www.example.com',)
)
url = 'file:///tmp/test_package_index'
self.assertTrue(index.url_ok(url, True))
assert index.url_ok(url, True)
def test_links_priority(self):
"""
@ -127,21 +122,30 @@ class TestPackageIndex(unittest.TestCase):
server.stop()
# the distribution has been found
self.assertTrue('foobar' in pi)
assert 'foobar' in pi
# we have only one link, because links are compared without md5
self.assertTrue(len(pi['foobar'])==1)
assert len(pi['foobar'])==1
# the link should be from the index
self.assertTrue('correct_md5' in pi['foobar'][0].location)
assert 'correct_md5' in pi['foobar'][0].location
def test_parse_bdist_wininst(self):
self.assertEqual(setuptools.package_index.parse_bdist_wininst(
'reportlab-2.5.win32-py2.4.exe'), ('reportlab-2.5', '2.4', 'win32'))
self.assertEqual(setuptools.package_index.parse_bdist_wininst(
'reportlab-2.5.win32.exe'), ('reportlab-2.5', None, 'win32'))
self.assertEqual(setuptools.package_index.parse_bdist_wininst(
'reportlab-2.5.win-amd64-py2.7.exe'), ('reportlab-2.5', '2.7', 'win-amd64'))
self.assertEqual(setuptools.package_index.parse_bdist_wininst(
'reportlab-2.5.win-amd64.exe'), ('reportlab-2.5', None, 'win-amd64'))
parse = setuptools.package_index.parse_bdist_wininst
actual = parse('reportlab-2.5.win32-py2.4.exe')
expected = 'reportlab-2.5', '2.4', 'win32'
assert actual == expected
actual = parse('reportlab-2.5.win32.exe')
expected = 'reportlab-2.5', None, 'win32'
assert actual == expected
actual = parse('reportlab-2.5.win-amd64-py2.7.exe')
expected = 'reportlab-2.5', '2.7', 'win-amd64'
assert actual == expected
actual = parse('reportlab-2.5.win-amd64.exe')
expected = 'reportlab-2.5', None, 'win-amd64'
assert actual == expected
def test__vcs_split_rev_from_url(self):
"""
@ -149,55 +153,51 @@ class TestPackageIndex(unittest.TestCase):
"""
vsrfu = setuptools.package_index.PackageIndex._vcs_split_rev_from_url
url, rev = vsrfu('https://example.com/bar@2995')
self.assertEqual(url, 'https://example.com/bar')
self.assertEqual(rev, '2995')
assert url == 'https://example.com/bar'
assert rev == '2995'
def test_local_index(self):
def test_local_index(self, tmpdir):
"""
local_open should be able to read an index from the file system.
"""
f = open('index.html', 'w')
f.write('<div>content</div>')
f.close()
try:
url = 'file:' + pathname2url(os.getcwd()) + '/'
res = setuptools.package_index.local_open(url)
finally:
os.remove('index.html')
index_file = tmpdir / 'index.html'
with index_file.open('w') as f:
f.write('<div>content</div>')
url = 'file:' + pathname2url(str(tmpdir)) + '/'
res = setuptools.package_index.local_open(url)
assert 'content' in res.read()
class TestContentCheckers(unittest.TestCase):
class TestContentCheckers:
def test_md5(self):
checker = setuptools.package_index.HashChecker.from_url(
'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
checker.feed('You should probably not be using MD5'.encode('ascii'))
self.assertEqual(checker.hash.hexdigest(),
'f12895fdffbd45007040d2e44df98478')
self.assertTrue(checker.is_valid())
assert checker.hash.hexdigest() == 'f12895fdffbd45007040d2e44df98478'
assert checker.is_valid()
def test_other_fragment(self):
"Content checks should succeed silently if no hash is present"
checker = setuptools.package_index.HashChecker.from_url(
'http://foo/bar#something%20completely%20different')
checker.feed('anything'.encode('ascii'))
self.assertTrue(checker.is_valid())
assert checker.is_valid()
def test_blank_md5(self):
"Content checks should succeed if a hash is empty"
checker = setuptools.package_index.HashChecker.from_url(
'http://foo/bar#md5=')
checker.feed('anything'.encode('ascii'))
self.assertTrue(checker.is_valid())
assert checker.is_valid()
def test_get_hash_name_md5(self):
checker = setuptools.package_index.HashChecker.from_url(
'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
self.assertEqual(checker.hash_name, 'md5')
assert checker.hash_name == 'md5'
def test_report(self):
checker = setuptools.package_index.HashChecker.from_url(
'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
rep = checker.report(lambda x: x, 'My message about %s')
self.assertEqual(rep, 'My message about md5')
assert rep == 'My message about md5'
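The HashChecker flow pinned down by TestContentCheckers, condensed: the expected digest is parsed out of the URL fragment, the downloaded bytes are fed in, and is_valid() compares the two.

import setuptools.package_index

url = 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478'
checker = setuptools.package_index.HashChecker.from_url(url)
checker.feed('You should probably not be using MD5'.encode('ascii'))
assert checker.hash_name == 'md5'
assert checker.is_valid()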

View File

@ -1,69 +1,43 @@
"""develop tests
"""
import sys
import os
import shutil
import unittest
import tempfile
import types
import pytest
import pkg_resources
import setuptools.sandbox
from setuptools.sandbox import DirectorySandbox, SandboxViolation
from setuptools.sandbox import DirectorySandbox
def has_win32com():
"""
Run this to determine if the local machine has win32com, and if it
does, include additional tests.
"""
if not sys.platform.startswith('win32'):
return False
try:
mod = __import__('win32com')
except ImportError:
return False
return True
class TestSandbox(unittest.TestCase):
class TestSandbox:
def setUp(self):
self.dir = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.dir)
def test_devnull(self):
if sys.version < '2.4':
return
sandbox = DirectorySandbox(self.dir)
def test_devnull(self, tmpdir):
sandbox = DirectorySandbox(str(tmpdir))
sandbox.run(self._file_writer(os.devnull))
@staticmethod
def _file_writer(path):
def do_write():
f = open(path, 'w')
f.write('xxx')
f.close()
with open(path, 'w') as f:
f.write('xxx')
return do_write
_file_writer = staticmethod(_file_writer)
if has_win32com():
def test_win32com(self):
"""
win32com should not be prevented from caching COM interfaces
in gen_py.
"""
import win32com
gen_py = win32com.__gen_path__
target = os.path.join(gen_py, 'test_write')
sandbox = DirectorySandbox(self.dir)
try:
try:
sandbox.run(self._file_writer(target))
except SandboxViolation:
self.fail("Could not create gen_py file due to SandboxViolation")
finally:
if os.path.exists(target): os.remove(target)
def test_win32com(self, tmpdir):
"""
win32com should not be prevented from caching COM interfaces
in gen_py.
"""
win32com = pytest.importorskip('win32com')
gen_py = win32com.__gen_path__
target = os.path.join(gen_py, 'test_write')
sandbox = DirectorySandbox(str(tmpdir))
try:
# attempt to create gen_py file
sandbox.run(self._file_writer(target))
finally:
if os.path.exists(target):
os.remove(target)
def test_setup_py_with_BOM(self):
"""
@ -72,8 +46,57 @@ class TestSandbox(unittest.TestCase):
target = pkg_resources.resource_filename(__name__,
'script-with-bom.py')
namespace = types.ModuleType('namespace')
setuptools.sandbox.execfile(target, vars(namespace))
setuptools.sandbox._execfile(target, vars(namespace))
assert namespace.result == 'passed'
if __name__ == '__main__':
unittest.main()
def test_setup_py_with_CRLF(self, tmpdir):
setup_py = tmpdir / 'setup.py'
with setup_py.open('wb') as stream:
stream.write(b'"degenerate script"\r\n')
setuptools.sandbox._execfile(str(setup_py), globals())
class TestExceptionSaver:
def test_exception_trapped(self):
with setuptools.sandbox.ExceptionSaver():
raise ValueError("details")
def test_exception_resumed(self):
with setuptools.sandbox.ExceptionSaver() as saved_exc:
raise ValueError("details")
with pytest.raises(ValueError) as caught:
saved_exc.resume()
assert isinstance(caught.value, ValueError)
assert str(caught.value) == 'details'
def test_exception_reconstructed(self):
orig_exc = ValueError("details")
with setuptools.sandbox.ExceptionSaver() as saved_exc:
raise orig_exc
with pytest.raises(ValueError) as caught:
saved_exc.resume()
assert isinstance(caught.value, ValueError)
assert caught.value is not orig_exc
def test_no_exception_passes_quietly(self):
with setuptools.sandbox.ExceptionSaver() as saved_exc:
pass
saved_exc.resume()
def test_unpickleable_exception(self):
class CantPickleThis(Exception):
"This Exception is unpickleable because it's not in globals"
with setuptools.sandbox.ExceptionSaver() as saved_exc:
raise CantPickleThis('detail')
with pytest.raises(setuptools.sandbox.UnpickleableException) as caught:
saved_exc.resume()
assert str(caught.value) == "CantPickleThis('detail',)"
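ExceptionSaver, exercised by the new TestExceptionSaver class, traps an exception raised inside its block and lets the caller re-raise a reconstructed copy later; the behaviour the tests above rely on, in brief:

import pytest
import setuptools.sandbox

with setuptools.sandbox.ExceptionSaver() as saved_exc:
    raise ValueError('details')   # trapped here, nothing propagates

with pytest.raises(ValueError) as caught:
    saved_exc.resume()            # re-raised as a reconstructed copy
assert str(caught.value) == 'details'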

View File

@ -6,18 +6,16 @@ import os
import shutil
import sys
import tempfile
import unittest
import unicodedata
import re
from setuptools.tests import environment, test_svn
from setuptools.tests.py26compat import skipIf
import contextlib
from setuptools.compat import StringIO, unicode
from setuptools.tests.py26compat import skipIf
from setuptools.command.sdist import sdist, walk_revctrl
import pytest
import pkg_resources
from setuptools.compat import StringIO, unicode, PY3, PY2
from setuptools.command.sdist import sdist
from setuptools.command.egg_info import manifest_maker
from setuptools.dist import Distribution
from setuptools import svn_utils
SETUP_ATTRS = {
'name': 'sdist_test',
@ -34,32 +32,33 @@ setup(**%r)
""" % SETUP_ATTRS
if sys.version_info >= (3,):
if PY3:
LATIN1_FILENAME = 'smörbröd.py'.encode('latin-1')
else:
LATIN1_FILENAME = 'sm\xf6rbr\xf6d.py'
# Cannot use context manager because of Python 2.4
@contextlib.contextmanager
def quiet():
global old_stdout, old_stderr
old_stdout, old_stderr = sys.stdout, sys.stderr
sys.stdout, sys.stderr = StringIO(), StringIO()
def unquiet():
sys.stdout, sys.stderr = old_stdout, old_stderr
try:
yield
finally:
sys.stdout, sys.stderr = old_stdout, old_stderr
# Fake byte literals for Python <= 2.5
def b(s, encoding='utf-8'):
if sys.version_info >= (3,):
if PY3:
return s.encode(encoding)
return s
# Convert to POSIX path
def posix(path):
if sys.version_info >= (3,) and not isinstance(path, str):
if PY3 and not isinstance(path, str):
return path.replace(os.sep.encode('ascii'), b('/'))
else:
return path.replace(os.sep, '/')
@ -74,17 +73,18 @@ def decompose(path):
path = unicodedata.normalize('NFD', path)
path = path.encode('utf-8')
except UnicodeError:
pass # Not UTF-8
pass # Not UTF-8
return path
class TestSdistTest(unittest.TestCase):
class TestSdistTest:
def setUp(self):
def setup_method(self, method):
self.temp_dir = tempfile.mkdtemp()
f = open(os.path.join(self.temp_dir, 'setup.py'), 'w')
f.write(SETUP_PY)
f.close()
# Set up the rest of the test package
test_pkg = os.path.join(self.temp_dir, 'sdist_test')
os.mkdir(test_pkg)
@ -97,7 +97,7 @@ class TestSdistTest(unittest.TestCase):
self.old_cwd = os.getcwd()
os.chdir(self.temp_dir)
def tearDown(self):
def teardown_method(self, method):
os.chdir(self.old_cwd)
shutil.rmtree(self.temp_dir)
@ -112,17 +112,40 @@ class TestSdistTest(unittest.TestCase):
cmd = sdist(dist)
cmd.ensure_finalized()
# squelch output
quiet()
try:
with quiet():
cmd.run()
finally:
unquiet()
manifest = cmd.filelist.files
self.assertTrue(os.path.join('sdist_test', 'a.txt') in manifest)
self.assertTrue(os.path.join('sdist_test', 'b.txt') in manifest)
self.assertTrue(os.path.join('sdist_test', 'c.rst') not in manifest)
assert os.path.join('sdist_test', 'a.txt') in manifest
assert os.path.join('sdist_test', 'b.txt') in manifest
assert os.path.join('sdist_test', 'c.rst') not in manifest
def test_defaults_case_sensitivity(self):
"""
Make sure default files (README.*, etc.) are added in a case-sensitive
way to avoid problems with packages built on Windows.
"""
open(os.path.join(self.temp_dir, 'readme.rst'), 'w').close()
open(os.path.join(self.temp_dir, 'SETUP.cfg'), 'w').close()
dist = Distribution(SETUP_ATTRS)
# the extension deliberately capitalized for this test
# to make sure the actual filename (not capitalized) gets added
# to the manifest
dist.script_name = 'setup.PY'
cmd = sdist(dist)
cmd.ensure_finalized()
with quiet():
cmd.run()
# lowercase all names so we can test in a case-insensitive way to
# make sure the files are not included
manifest = [name.lower() for name in cmd.filelist.files]
assert 'readme.rst' not in manifest, manifest
assert 'setup.py' not in manifest, manifest
assert 'setup.cfg' not in manifest, manifest
def test_manifest_is_written_with_utf8_encoding(self):
# Test for #303.
@ -135,34 +158,31 @@ class TestSdistTest(unittest.TestCase):
# UTF-8 filename
filename = os.path.join('sdist_test', 'smörbröd.py')
# Must create the file or it will get stripped.
open(filename, 'w').close()
# Add UTF-8 filename and write manifest
quiet()
try:
with quiet():
mm.run()
mm.filelist.files.append(filename)
mm.filelist.append(filename)
mm.write_manifest()
finally:
unquiet()
manifest = open(mm.manifest, 'rbU')
contents = manifest.read()
manifest.close()
# The manifest should be UTF-8 encoded
try:
u_contents = contents.decode('UTF-8')
except UnicodeDecodeError:
e = sys.exc_info()[1]
self.fail(e)
u_contents = contents.decode('UTF-8')
# The manifest should contain the UTF-8 filename
if sys.version_info >= (3,):
self.assertTrue(posix(filename) in u_contents)
else:
self.assertTrue(posix(filename) in contents)
if PY2:
fs_enc = sys.getfilesystemencoding()
filename = filename.decode(fs_enc)
assert posix(filename) in u_contents
# Python 3 only
if sys.version_info >= (3,):
if PY3:
def test_write_manifest_allows_utf8_filenames(self):
# Test for #303.
@ -175,36 +195,37 @@ class TestSdistTest(unittest.TestCase):
# UTF-8 filename
filename = os.path.join(b('sdist_test'), b('smörbröd.py'))
# Must touch the file or risk removal
open(filename, "w").close()
# Add filename and write manifest
quiet()
try:
with quiet():
mm.run()
u_filename = filename.decode('utf-8')
mm.filelist.files.append(u_filename)
# Re-write manifest
mm.write_manifest()
finally:
unquiet()
manifest = open(mm.manifest, 'rbU')
contents = manifest.read()
manifest.close()
# The manifest should be UTF-8 encoded
try:
contents.decode('UTF-8')
except UnicodeDecodeError:
e = sys.exc_info()[1]
self.fail(e)
contents.decode('UTF-8')
# The manifest should contain the UTF-8 filename
self.assertTrue(posix(filename) in contents)
assert posix(filename) in contents
# The filelist should have been updated as well
self.assertTrue(u_filename in mm.filelist.files)
assert u_filename in mm.filelist.files
def test_write_manifest_skips_non_utf8_filenames(self):
# Test for #303.
"""
Files that cannot be encoded to UTF-8 (specifically, those that
weren't originally successfully decoded and have surrogate
escapes) should be omitted from the manifest.
See https://bitbucket.org/tarek/distribute/issue/303 for history.
"""
dist = Distribution(SETUP_ATTRS)
dist.script_name = 'setup.py'
mm = manifest_maker(dist)
@ -215,32 +236,25 @@ class TestSdistTest(unittest.TestCase):
filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)
# Add filename with surrogates and write manifest
quiet()
try:
with quiet():
mm.run()
u_filename = filename.decode('utf-8', 'surrogateescape')
mm.filelist.files.append(u_filename)
mm.filelist.append(u_filename)
# Re-write manifest
mm.write_manifest()
finally:
unquiet()
manifest = open(mm.manifest, 'rbU')
contents = manifest.read()
manifest.close()
# The manifest should be UTF-8 encoded
try:
contents.decode('UTF-8')
except UnicodeDecodeError:
e = sys.exc_info()[1]
self.fail(e)
contents.decode('UTF-8')
# The Latin-1 filename should have been skipped
self.assertFalse(posix(filename) in contents)
assert posix(filename) not in contents
# The filelist should have been updated as well
self.assertFalse(u_filename in mm.filelist.files)
assert u_filename not in mm.filelist.files
def test_manifest_is_read_with_utf8_encoding(self):
# Test for #303.
@ -250,17 +264,14 @@ class TestSdistTest(unittest.TestCase):
cmd.ensure_finalized()
# Create manifest
quiet()
try:
with quiet():
cmd.run()
finally:
unquiet()
# Add UTF-8 filename to manifest
filename = os.path.join(b('sdist_test'), b('smörbröd.py'))
cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
manifest = open(cmd.manifest, 'ab')
manifest.write(b('\n')+filename)
manifest.write(b('\n') + filename)
manifest.close()
# The file must exist to be included in the filelist
@ -268,19 +279,16 @@ class TestSdistTest(unittest.TestCase):
# Re-read manifest
cmd.filelist.files = []
quiet()
try:
with quiet():
cmd.read_manifest()
finally:
unquiet()
# The filelist should contain the UTF-8 filename
if sys.version_info >= (3,):
if PY3:
filename = filename.decode('utf-8')
self.assertTrue(filename in cmd.filelist.files)
assert filename in cmd.filelist.files
# Python 3 only
if sys.version_info >= (3,):
if PY3:
def test_read_manifest_skips_non_utf8_filenames(self):
# Test for #303.
@ -290,17 +298,14 @@ class TestSdistTest(unittest.TestCase):
cmd.ensure_finalized()
# Create manifest
quiet()
try:
with quiet():
cmd.run()
finally:
unquiet()
# Add Latin-1 filename to manifest
filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)
cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
manifest = open(cmd.manifest, 'ab')
manifest.write(b('\n')+filename)
manifest.write(b('\n') + filename)
manifest.close()
# The file must exist to be included in the filelist
@ -308,22 +313,16 @@ class TestSdistTest(unittest.TestCase):
# Re-read manifest
cmd.filelist.files = []
quiet()
try:
try:
cmd.read_manifest()
except UnicodeDecodeError:
e = sys.exc_info()[1]
self.fail(e)
finally:
unquiet()
with quiet():
cmd.read_manifest()
# The Latin-1 filename should have been skipped
filename = filename.decode('latin-1')
self.assertFalse(filename in cmd.filelist.files)
assert filename not in cmd.filelist.files
@skipIf(sys.version_info >= (3,) and locale.getpreferredencoding() != 'UTF-8',
'Unittest fails if locale is not utf-8 but the manifests is recorded correctly')
@pytest.mark.skipif(PY3 and locale.getpreferredencoding() != 'UTF-8',
reason='Unittest fails if locale is not utf-8 but the manifest is '
'recorded correctly')
def test_sdist_with_utf8_encoded_filename(self):
# Test for #303.
dist = Distribution(SETUP_ATTRS)
@ -335,31 +334,28 @@ class TestSdistTest(unittest.TestCase):
filename = os.path.join(b('sdist_test'), b('smörbröd.py'))
open(filename, 'w').close()
quiet()
try:
with quiet():
cmd.run()
finally:
unquiet()
if sys.platform == 'darwin':
filename = decompose(filename)
if sys.version_info >= (3,):
if PY3:
fs_enc = sys.getfilesystemencoding()
if sys.platform == 'win32':
if fs_enc == 'cp1252':
# Python 3 mangles the UTF-8 filename
filename = filename.decode('cp1252')
self.assertTrue(filename in cmd.filelist.files)
assert filename in cmd.filelist.files
else:
filename = filename.decode('mbcs')
self.assertTrue(filename in cmd.filelist.files)
assert filename in cmd.filelist.files
else:
filename = filename.decode('utf-8')
self.assertTrue(filename in cmd.filelist.files)
assert filename in cmd.filelist.files
else:
self.assertTrue(filename in cmd.filelist.files)
assert filename in cmd.filelist.files
def test_sdist_with_latin1_encoded_filename(self):
# Test for #303.
@ -371,16 +367,13 @@ class TestSdistTest(unittest.TestCase):
# Latin-1 filename
filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)
open(filename, 'w').close()
self.assertTrue(os.path.isfile(filename))
assert os.path.isfile(filename)
quiet()
try:
with quiet():
cmd.run()
finally:
unquiet()
if sys.version_info >= (3,):
#not all windows systems have a default FS encoding of cp1252
if PY3:
# not all windows systems have a default FS encoding of cp1252
if sys.platform == 'win32':
# Latin-1 is similar to Windows-1252; however, on an mbcs
# filesystem it is not stored in Latin-1 encoding
@ -390,146 +383,37 @@ class TestSdistTest(unittest.TestCase):
else:
filename = filename.decode('latin-1')
self.assertTrue(filename in cmd.filelist.files)
assert filename in cmd.filelist.files
else:
# The Latin-1 filename should have been skipped
filename = filename.decode('latin-1')
self.assertFalse(filename in cmd.filelist.files)
assert filename not in cmd.filelist.files
else:
# No conversion takes place under Python 2 and the file
# is included. We shall keep it that way for BBB.
self.assertTrue(filename in cmd.filelist.files)
# Under Python 2 there seems to be no decoded string in the
# filelist. However, due to the decoding and encoding of the
# file name needed to get a utf-8 manifest, the latin-1 name may be excluded.
try:
# fs_enc should match how one expects the decoding to
# be performed for the manifest output.
fs_enc = sys.getfilesystemencoding()
filename.decode(fs_enc)
assert filename in cmd.filelist.files
except UnicodeDecodeError:
assert filename not in cmd.filelist.files
class TestDummyOutput(environment.ZippedEnvironment):
def test_default_revctrl():
"""
When _default_revctrl was removed from the `setuptools.command.sdist`
module in 10.0, it broke some systems which keep an old install of
setuptools (Distribute) around. Those old versions require that the
setuptools package continue to implement that interface, so this
function provides that interface, stubbed. See #320 for details.
def setUp(self):
self.datafile = os.path.join('setuptools', 'tests',
'svn_data', "dummy.zip")
self.dataname = "dummy"
super(TestDummyOutput, self).setUp()
def _run(self):
code, data = environment.run_setup_py(["sdist"],
pypath=self.old_cwd,
data_stream=0)
if code:
info = "DIR: " + os.path.abspath('.')
info += "\n SDIST RETURNED: %i\n\n" % code
info += data
raise AssertionError(info)
datalines = data.splitlines()
possible = (
"running sdist",
"running egg_info",
"creating dummy\.egg-info",
"writing dummy\.egg-info",
"writing top-level names to dummy\.egg-info",
"writing dependency_links to dummy\.egg-info",
"writing manifest file 'dummy\.egg-info",
"reading manifest file 'dummy\.egg-info",
"reading manifest template 'MANIFEST\.in'",
"writing manifest file 'dummy\.egg-info",
"creating dummy-0.1.1",
"making hard links in dummy-0\.1\.1",
"copying files to dummy-0\.1\.1",
"copying \S+ -> dummy-0\.1\.1",
"copying dummy",
"copying dummy\.egg-info",
"hard linking \S+ -> dummy-0\.1\.1",
"hard linking dummy",
"hard linking dummy\.egg-info",
"Writing dummy-0\.1\.1",
"creating dist",
"creating 'dist",
"Creating tar archive",
"running check",
"adding 'dummy-0\.1\.1",
"tar .+ dist/dummy-0\.1\.1\.tar dummy-0\.1\.1",
"gzip .+ dist/dummy-0\.1\.1\.tar",
"removing 'dummy-0\.1\.1' \\(and everything under it\\)",
)
print(" DIR: " + os.path.abspath('.'))
for line in datalines:
found = False
for pattern in possible:
if re.match(pattern, line):
print(" READ: " + line)
found = True
break
if not found:
raise AssertionError("Unexpexected: %s\n-in-\n%s"
% (line, data))
return data
def test_sources(self):
self._run()
class TestSvn(environment.ZippedEnvironment):
def setUp(self):
version = svn_utils.SvnInfo.get_svn_version()
if not version: # None or Empty
return
self.base_version = tuple([int(x) for x in version.split('.')][:2])
if not self.base_version:
raise ValueError('No SVN tools installed')
elif self.base_version < (1, 3):
raise ValueError('Insufficient SVN Version %s' % version)
elif self.base_version >= (1, 9):
#trying the latest version
self.base_version = (1, 8)
self.dataname = "svn%i%i_example" % self.base_version
self.datafile = os.path.join('setuptools', 'tests',
'svn_data', self.dataname + ".zip")
super(TestSvn, self).setUp()
@skipIf(not test_svn._svn_check, "No SVN to test in the first place")
def test_walksvn(self):
if self.base_version >= (1, 6):
folder2 = 'third party2'
folder3 = 'third party3'
else:
folder2 = 'third_party2'
folder3 = 'third_party3'
#TODO is this right
expected = set([
os.path.join('a file'),
os.path.join(folder2, 'Changes.txt'),
os.path.join(folder2, 'MD5SUMS'),
os.path.join(folder2, 'README.txt'),
os.path.join(folder3, 'Changes.txt'),
os.path.join(folder3, 'MD5SUMS'),
os.path.join(folder3, 'README.txt'),
os.path.join(folder3, 'TODO.txt'),
os.path.join(folder3, 'fin'),
os.path.join('third_party', 'README.txt'),
os.path.join('folder', folder2, 'Changes.txt'),
os.path.join('folder', folder2, 'MD5SUMS'),
os.path.join('folder', folder2, 'WatashiNiYomimasu.txt'),
os.path.join('folder', folder3, 'Changes.txt'),
os.path.join('folder', folder3, 'fin'),
os.path.join('folder', folder3, 'MD5SUMS'),
os.path.join('folder', folder3, 'oops'),
os.path.join('folder', folder3, 'WatashiNiYomimasu.txt'),
os.path.join('folder', folder3, 'ZuMachen.txt'),
os.path.join('folder', 'third_party', 'WatashiNiYomimasu.txt'),
os.path.join('folder', 'lalala.txt'),
os.path.join('folder', 'quest.txt'),
# The example will have a deleted file
# (or should) but shouldn't return it
])
self.assertEqual(set(x for x in walk_revctrl()), expected)
def test_suite():
return unittest.defaultTestLoader.loadTestsFromName(__name__)
This interface must be maintained until Ubuntu 12.04 is no longer
supported (by Setuptools).
"""
ep_def = 'svn_cvs = setuptools.command.sdist:_default_revctrl'
ep = pkg_resources.EntryPoint.parse(ep_def)
res = ep.resolve()
assert hasattr(res, '__iter__')

View File

@ -1,124 +1,91 @@
# -*- coding: UTF-8 -*-
"""develop tests
"""
import sys
import os, shutil, tempfile, unittest
import tempfile
from __future__ import unicode_literals
import os
import site
from distutils.errors import DistutilsError
from setuptools.compat import StringIO
import pytest
from setuptools.command.test import test
from setuptools.command import easy_install as easy_install_pkg
from setuptools.dist import Distribution
SETUP_PY = """\
from setuptools import setup
from .textwrap import DALS
from . import contexts
setup(name='foo',
packages=['name', 'name.space', 'name.space.tests'],
namespace_packages=['name'],
test_suite='name.space.tests.test_suite',
)
"""
SETUP_PY = DALS("""
from setuptools import setup
NS_INIT = """# -*- coding: Latin-1 -*-
# Söme Arbiträry Ünicode to test Issüé 310
try:
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
"""
# Make sure this is Latin-1 binary, before writing:
if sys.version_info < (3,):
NS_INIT = NS_INIT.decode('UTF-8')
NS_INIT = NS_INIT.encode('Latin-1')
setup(name='foo',
packages=['name', 'name.space', 'name.space.tests'],
namespace_packages=['name'],
test_suite='name.space.tests.test_suite',
)
""")
TEST_PY = """import unittest
NS_INIT = DALS("""
# -*- coding: Latin-1 -*-
# Söme Arbiträry Ünicode to test Distribute Issüé 310
try:
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
""")
class TestTest(unittest.TestCase):
def test_test(self):
print "Foo" # Should fail under Python 3 unless 2to3 is used
TEST_PY = DALS("""
import unittest
test_suite = unittest.makeSuite(TestTest)
"""
class TestTest(unittest.TestCase):
def test_test(self):
print "Foo" # Should fail under Python 3 unless 2to3 is used
class TestTestTest(unittest.TestCase):
test_suite = unittest.makeSuite(TestTest)
""")
def setUp(self):
if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
return
# Directory structure
self.dir = tempfile.mkdtemp()
os.mkdir(os.path.join(self.dir, 'name'))
os.mkdir(os.path.join(self.dir, 'name', 'space'))
os.mkdir(os.path.join(self.dir, 'name', 'space', 'tests'))
# setup.py
setup = os.path.join(self.dir, 'setup.py')
f = open(setup, 'wt')
@pytest.fixture
def sample_test(tmpdir_cwd):
os.makedirs('name/space/tests')
# setup.py
with open('setup.py', 'wt') as f:
f.write(SETUP_PY)
f.close()
self.old_cwd = os.getcwd()
# name/__init__.py
init = os.path.join(self.dir, 'name', '__init__.py')
f = open(init, 'wb')
f.write(NS_INIT)
f.close()
# name/space/__init__.py
init = os.path.join(self.dir, 'name', 'space', '__init__.py')
f = open(init, 'wt')
# name/__init__.py
with open('name/__init__.py', 'wb') as f:
f.write(NS_INIT.encode('Latin-1'))
# name/space/__init__.py
with open('name/space/__init__.py', 'wt') as f:
f.write('#empty\n')
f.close()
# name/space/tests/__init__.py
init = os.path.join(self.dir, 'name', 'space', 'tests', '__init__.py')
f = open(init, 'wt')
# name/space/tests/__init__.py
with open('name/space/tests/__init__.py', 'wt') as f:
f.write(TEST_PY)
f.close()
os.chdir(self.dir)
self.old_base = site.USER_BASE
site.USER_BASE = tempfile.mkdtemp()
self.old_site = site.USER_SITE
site.USER_SITE = tempfile.mkdtemp()
def tearDown(self):
if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
return
os.chdir(self.old_cwd)
shutil.rmtree(self.dir)
shutil.rmtree(site.USER_BASE)
shutil.rmtree(site.USER_SITE)
site.USER_BASE = self.old_base
site.USER_SITE = self.old_site
@pytest.mark.skipif('hasattr(sys, "real_prefix")')
@pytest.mark.usefixtures('user_override')
@pytest.mark.usefixtures('sample_test')
class TestTestTest:
def test_test(self):
if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
return
dist = Distribution(dict(
params = dict(
name='foo',
packages=['name', 'name.space', 'name.space.tests'],
namespace_packages=['name'],
test_suite='name.space.tests.test_suite',
use_2to3=True,
))
)
dist = Distribution(params)
dist.script_name = 'setup.py'
cmd = test(dist)
cmd.user = 1
cmd.ensure_finalized()
cmd.install_dir = site.USER_SITE
cmd.user = 1
old_stdout = sys.stdout
sys.stdout = StringIO()
try:
try: # try/except/finally doesn't work in Python 2.4, so we need nested try-statements.
with contexts.quiet():
# The test runner calls sys.exit
with contexts.suppress_exceptions(SystemExit):
cmd.run()
except SystemExit: # The test runner calls sys.exit, stop that making an error.
pass
finally:
sys.stdout = old_stdout

View File

@ -1,72 +1,59 @@
"""build_ext tests
"""
import sys, os, shutil, tempfile, unittest, site, zipfile
import os
import zipfile
import contextlib
import pytest
from setuptools.command.upload_docs import upload_docs
from setuptools.dist import Distribution
SETUP_PY = """\
from setuptools import setup
from .textwrap import DALS
from . import contexts
setup(name='foo')
"""
class TestUploadDocsTest(unittest.TestCase):
def setUp(self):
self.dir = tempfile.mkdtemp()
setup = os.path.join(self.dir, 'setup.py')
f = open(setup, 'w')
SETUP_PY = DALS(
"""
from setuptools import setup
setup(name='foo')
""")
@pytest.fixture
def sample_project(tmpdir_cwd):
# setup.py
with open('setup.py', 'wt') as f:
f.write(SETUP_PY)
f.close()
self.old_cwd = os.getcwd()
os.chdir(self.dir)
self.upload_dir = os.path.join(self.dir, 'build')
os.mkdir(self.upload_dir)
os.mkdir('build')
# A test document.
f = open(os.path.join(self.upload_dir, 'index.html'), 'w')
# A test document.
with open('build/index.html', 'w') as f:
f.write("Hello world.")
f.close()
# An empty folder.
os.mkdir(os.path.join(self.upload_dir, 'empty'))
# An empty folder.
os.mkdir('build/empty')
if sys.version >= "2.6":
self.old_base = site.USER_BASE
site.USER_BASE = upload_docs.USER_BASE = tempfile.mkdtemp()
self.old_site = site.USER_SITE
site.USER_SITE = upload_docs.USER_SITE = tempfile.mkdtemp()
def tearDown(self):
os.chdir(self.old_cwd)
shutil.rmtree(self.dir)
if sys.version >= "2.6":
shutil.rmtree(site.USER_BASE)
shutil.rmtree(site.USER_SITE)
site.USER_BASE = self.old_base
site.USER_SITE = self.old_site
@pytest.mark.usefixtures('sample_project')
@pytest.mark.usefixtures('user_override')
class TestUploadDocsTest:
def test_create_zipfile(self):
# Test to make sure zipfile creation handles common cases.
# This explicitly includes a folder containing an empty folder.
"""
Ensure zipfile creation handles common cases, including a folder
containing an empty folder.
"""
dist = Distribution()
cmd = upload_docs(dist)
cmd.upload_dir = self.upload_dir
cmd.target_dir = self.upload_dir
tmp_dir = tempfile.mkdtemp()
tmp_file = os.path.join(tmp_dir, 'foo.zip')
try:
cmd.target_dir = cmd.upload_dir = 'build'
with contexts.tempdir() as tmp_dir:
tmp_file = os.path.join(tmp_dir, 'foo.zip')
zip_file = cmd.create_zipfile(tmp_file)
assert zipfile.is_zipfile(tmp_file)
zip_file = zipfile.ZipFile(tmp_file) # woh...
assert zip_file.namelist() == ['index.html']
zip_file.close()
finally:
shutil.rmtree(tmp_dir)
with contextlib.closing(zipfile.ZipFile(tmp_file)) as zip_file:
assert zip_file.namelist() == ['index.html']

View File

@ -0,0 +1,183 @@
"""
Python Script Wrapper for Windows
=================================
setuptools includes wrappers for Python scripts that allow them to be
executed like regular Windows programs. There are two wrappers, one
for command-line programs, cli.exe, and one for graphical programs,
gui.exe. These programs are almost identical, function in much
the same way, and are generated from the same source file. The
wrapper programs are used by copying them to the directory containing
the script they are to wrap, giving them the same name as the script
they wrap.
"""
from __future__ import absolute_import
import sys
import textwrap
import subprocess
import pytest
from setuptools.command.easy_install import nt_quote_arg
import pkg_resources
pytestmark = pytest.mark.skipif(sys.platform != 'win32', reason="Windows only")
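# Editor's note: an illustrative sketch, not part of the upstream diff. It
# restates in plain Python the name-mapping rule the wrappers apply (and
# which TestCLI.test_basic below describes): strip the wrapper's '.exe'
# suffix and append '-script.py', or '-script.pyw' for the GUI wrapper. The
# helper name expected_script_path is hypothetical.
def expected_script_path(wrapper_path, gui=False):
    base = wrapper_path[:-4] if wrapper_path.lower().endswith('.exe') else wrapper_path
    return base + ('-script.pyw' if gui else '-script.py')
# e.g. expected_script_path('C:/Scripts/foo.exe') == 'C:/Scripts/foo-script.py'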
class WrapperTester:
@classmethod
def prep_script(cls, template):
python_exe = nt_quote_arg(sys.executable)
return template % locals()
@classmethod
def create_script(cls, tmpdir):
"""
Create a simple script, foo-script.py
Note that the script starts with a Unix-style '#!' line saying which
Python executable to run. The wrapper will use this line to find the
correct Python executable.
"""
script = cls.prep_script(cls.script_tmpl)
with (tmpdir / cls.script_name).open('w') as f:
f.write(script)
# also copy cli.exe to the sample directory
with (tmpdir / cls.wrapper_name).open('wb') as f:
w = pkg_resources.resource_string('setuptools', cls.wrapper_source)
f.write(w)
class TestCLI(WrapperTester):
script_name = 'foo-script.py'
wrapper_source = 'cli-32.exe'
wrapper_name = 'foo.exe'
script_tmpl = textwrap.dedent("""
#!%(python_exe)s
import sys
input = repr(sys.stdin.read())
print(sys.argv[0][-14:])
print(sys.argv[1:])
print(input)
if __debug__:
print('non-optimized')
""").lstrip()
def test_basic(self, tmpdir):
"""
When the copy of cli.exe, foo.exe in this example, runs, it examines
the path name it was run with and computes a Python script path name
by removing the '.exe' suffix and adding the '-script.py' suffix. (For
GUI programs, the suffix '-script.pyw' is added.) This is why we
named our script the way we did. Now we can run our script by running
the wrapper:
This example was a little pathological in that it exercised Windows
(MS C runtime) quoting rules:
- Strings containing spaces are surrounded by double quotes.
- Double quotes in strings need to be escaped by preceding them with
back slashes.
- One or more backslashes preceding double quotes need to be escaped
by preceding each of them with back slashes.
"""
self.create_script(tmpdir)
cmd = [
str(tmpdir / 'foo.exe'),
'arg1',
'arg 2',
'arg "2\\"',
'arg 4\\',
'arg5 a\\\\b',
]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
stdout, stderr = proc.communicate('hello\nworld\n'.encode('ascii'))
actual = stdout.decode('ascii').replace('\r\n', '\n')
expected = textwrap.dedent(r"""
\foo-script.py
['arg1', 'arg 2', 'arg "2\\"', 'arg 4\\', 'arg5 a\\\\b']
'hello\nworld\n'
non-optimized
""").lstrip()
assert actual == expected
def test_with_options(self, tmpdir):
"""
Specifying Python Command-line Options
--------------------------------------
You can specify a single argument on the '#!' line. This can be used
to specify Python options like -O to run in optimized mode, or -i
to start the interactive interpreter. You can combine multiple
options as usual. For example, to run in optimized mode and
enter the interpreter after running the script, you could use -Oi:
"""
self.create_script(tmpdir)
tmpl = textwrap.dedent("""
#!%(python_exe)s -Oi
import sys
input = repr(sys.stdin.read())
print(sys.argv[0][-14:])
print(sys.argv[1:])
print(input)
if __debug__:
print('non-optimized')
sys.ps1 = '---'
""").lstrip()
with (tmpdir / 'foo-script.py').open('w') as f:
f.write(self.prep_script(tmpl))
cmd = [str(tmpdir / 'foo.exe')]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
stdout, stderr = proc.communicate()
actual = stdout.decode('ascii').replace('\r\n', '\n')
expected = textwrap.dedent(r"""
\foo-script.py
[]
''
---
""").lstrip()
assert actual == expected
class TestGUI(WrapperTester):
"""
Testing the GUI Version
-----------------------
"""
script_name = 'bar-script.pyw'
wrapper_source = 'gui-32.exe'
wrapper_name = 'bar.exe'
script_tmpl = textwrap.dedent("""
#!%(python_exe)s
import sys
f = open(sys.argv[1], 'wb')
bytes_written = f.write(repr(sys.argv[2]).encode('utf-8'))
f.close()
""").strip()
def test_basic(self, tmpdir):
"""Test the GUI version with the simple scipt, bar-script.py"""
self.create_script(tmpdir)
cmd = [
str(tmpdir / 'bar.exe'),
str(tmpdir / 'test_output.txt'),
'Test Argument',
]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
stdout, stderr = proc.communicate()
assert not stdout
assert not stderr
with (tmpdir / 'test_output.txt').open('rb') as f_out:
actual = f_out.read().decode('ascii')
assert actual == repr('Test Argument')

View File

@ -0,0 +1,8 @@
from __future__ import absolute_import
import textwrap
def DALS(s):
"dedent and left-strip"
return textwrap.dedent(s).lstrip()
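# Editor's note: an illustrative check, not part of the upstream diff. DALS
# lets the fixtures elsewhere in the test suite embed indented triple-quoted
# file contents without the indentation or the leading newline surviving
# into the file that gets written.
assert DALS(
    """
    from setuptools import setup
    setup(name='foo')
    """) == "from setuptools import setup\nsetup(name='foo')\n"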

View File

@ -0,0 +1,41 @@
import unicodedata
import sys
from setuptools.compat import unicode as decoded_string
# HFS Plus uses decomposed UTF-8
def decompose(path):
if isinstance(path, decoded_string):
return unicodedata.normalize('NFD', path)
try:
path = path.decode('utf-8')
path = unicodedata.normalize('NFD', path)
path = path.encode('utf-8')
except UnicodeError:
pass # Not UTF-8
return path
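# Editor's note: an illustrative check, not part of the upstream diff. NFD
# ("decomposed") form splits 'ö' (U+00F6) into 'o' plus a combining
# diaeresis (U+0308), which is how HFS Plus stores such names on disk.
assert decompose(u'sm\xf6rbr\xf6d.py') == u'smo\u0308rbro\u0308d.py'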
def filesys_decode(path):
"""
Ensure that the given path is decoded;
return None when no expected encoding works.
"""
fs_enc = sys.getfilesystemencoding()
if isinstance(path, decoded_string):
return path
for enc in (fs_enc, "utf-8"):
try:
return path.decode(enc)
except UnicodeDecodeError:
continue
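# Editor's note: an illustrative check, not part of the upstream diff. Text
# input passes through unchanged; byte input is tried against the filesystem
# encoding first and UTF-8 second, and None is returned only if both fail.
assert filesys_decode(u'already_text.py') == u'already_text.py'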
def try_encode(string, enc):
"turn unicode encoding into a functional routine"
try:
return string.encode(enc)
except UnicodeEncodeError:
return None

View File

@ -0,0 +1,11 @@
import os
import os.path
def cs_path_exists(fspath):
if not os.path.exists(fspath):
return False
# make absolute so we always have a directory
abspath = os.path.abspath(fspath)
directory, filename = os.path.split(abspath)
return filename in os.listdir(directory)
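# Editor's note: a hedged usage sketch, not part of the upstream diff. It
# shows why the byte-for-byte check matters on case-insensitive filesystems
# (Windows, default macOS), where a bare os.path.exists() would also accept
# a differently-cased spelling.
if __name__ == '__main__':
    import tempfile
    sample_dir = tempfile.mkdtemp()
    open(os.path.join(sample_dir, 'setup.py'), 'w').close()
    # the exact on-disk spelling is found
    assert cs_path_exists(os.path.join(sample_dir, 'setup.py'))
    # a differently-cased spelling is rejected, even where os.path.exists()
    # reports True
    assert not cs_path_exists(os.path.join(sample_dir, 'setup.PY'))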

View File

@ -1 +1 @@
__version__ = '2.2'
__version__ = '12.0.5'

View File

@ -0,0 +1,29 @@
import platform
import ctypes
def windows_only(func):
if platform.system() != 'Windows':
return lambda *args, **kwargs: None
return func
@windows_only
def hide_file(path):
"""
Set the hidden attribute on a file or directory.
From http://stackoverflow.com/questions/19622133/
`path` must be text.
"""
__import__('ctypes.wintypes')
SetFileAttributes = ctypes.windll.kernel32.SetFileAttributesW
SetFileAttributes.argtypes = ctypes.wintypes.LPWSTR, ctypes.wintypes.DWORD
SetFileAttributes.restype = ctypes.wintypes.BOOL
FILE_ATTRIBUTE_HIDDEN = 0x02
ret = SetFileAttributes(path, FILE_ATTRIBUTE_HIDDEN)
if not ret:
raise ctypes.WinError()
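# Editor's note: a hedged usage sketch, not part of the upstream diff. On
# Windows the call below sets FILE_ATTRIBUTE_HIDDEN on the directory; on any
# other platform the windows_only decorator makes hide_file a no-op. The
# docstring above requires a text path, hence the decode on Python 2.
if __name__ == '__main__':
    import sys
    import tempfile
    target = tempfile.mkdtemp()
    if isinstance(target, bytes):
        target = target.decode(sys.getfilesystemencoding())
    hide_file(target)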