diff --git a/awx/lib/site-packages/README b/awx/lib/site-packages/README
index f96119c3cc..2ee7261096 100644
--- a/awx/lib/site-packages/README
+++ b/awx/lib/site-packages/README
@@ -7,7 +7,7 @@ anyjson==0.3.3 (anyjson/*)
argparse==1.2.1 (argparse.py, needed for Python 2.6 support)
Babel==1.3 (babel/*, excluded bin/pybabel)
billiard==3.3.0.16 (billiard/*, funtests/*, excluded _billiard.so)
-boto==2.27.0 (boto/*, excluded bin/asadmin, bin/bundle_image, bin/cfadmin,
+boto==2.32.1 (boto/*, excluded bin/asadmin, bin/bundle_image, bin/cfadmin,
bin/cq, bin/cwutil, bin/dynamodb_dump, bin/dynamodb_load, bin/elbadmin,
bin/fetch_file, bin/glacier, bin/instance_events, bin/kill_instance,
bin/launch_instance, bin/list_instances, bin/lss3, bin/mturk,
diff --git a/awx/lib/site-packages/boto/__init__.py b/awx/lib/site-packages/boto/__init__.py
index 35539123f2..3d90e901ef 100644
--- a/awx/lib/site-packages/boto/__init__.py
+++ b/awx/lib/site-packages/boto/__init__.py
@@ -34,10 +34,11 @@ import re
import sys
import logging
import logging.config
-import urlparse
+
+from boto.compat import urlparse
from boto.exception import InvalidUriError
-__version__ = '2.27.0'
+__version__ = '2.32.1'
Version = __version__ # for backware compatibility
# http://bugs.python.org/issue7980
@@ -492,7 +493,7 @@ def connect_ec2_endpoint(url, aws_access_key_id=None,
"""
from boto.ec2.regioninfo import RegionInfo
- purl = urlparse.urlparse(url)
+ purl = urlparse(url)
kwargs['port'] = purl.port
kwargs['host'] = purl.hostname
kwargs['path'] = purl.path
@@ -653,7 +654,7 @@ def connect_cloudsearch(aws_access_key_id=None,
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
- :rtype: :class:`boto.ec2.autoscale.CloudSearchConnection`
+ :rtype: :class:`boto.cloudsearch.layer2.Layer2`
:return: A connection to Amazon's CloudSearch service
"""
from boto.cloudsearch.layer2 import Layer2
@@ -661,6 +662,24 @@ def connect_cloudsearch(aws_access_key_id=None,
**kwargs)
+def connect_cloudsearch2(aws_access_key_id=None,
+ aws_secret_access_key=None,
+ **kwargs):
+ """
+ :type aws_access_key_id: string
+ :param aws_access_key_id: Your AWS Access Key ID
+
+ :type aws_secret_access_key: string
+ :param aws_secret_access_key: Your AWS Secret Access Key
+
+ :rtype: :class:`boto.cloudsearch2.layer2.Layer2`
+ :return: A connection to Amazon's CloudSearch2 service
+ """
+ from boto.cloudsearch2.layer2 import Layer2
+ return Layer2(aws_access_key_id, aws_secret_access_key,
+ **kwargs)
+
+
def connect_beanstalk(aws_access_key_id=None,
aws_secret_access_key=None,
**kwargs):
@@ -817,6 +836,28 @@ def connect_kinesis(aws_access_key_id=None,
**kwargs
)
+def connect_logs(aws_access_key_id=None,
+ aws_secret_access_key=None,
+ **kwargs):
+ """
+ Connect to Amazon CloudWatch Logs
+
+ :type aws_access_key_id: string
+ :param aws_access_key_id: Your AWS Access Key ID
+
+ :type aws_secret_access_key: string
+ :param aws_secret_access_key: Your AWS Secret Access Key
+
+    :rtype: :class:`boto.logs.layer1.CloudWatchLogsConnection`
+ :return: A connection to the Amazon CloudWatch Logs service
+ """
+ from boto.logs.layer1 import CloudWatchLogsConnection
+ return CloudWatchLogsConnection(
+ aws_access_key_id=aws_access_key_id,
+ aws_secret_access_key=aws_secret_access_key,
+ **kwargs
+ )
+
def storage_uri(uri_str, default_scheme='file', debug=0, validate=True,
bucket_storage_uri_class=BucketStorageUri,
suppress_consec_slashes=True, is_latest=False):
@@ -861,7 +902,7 @@ def storage_uri(uri_str, default_scheme='file', debug=0, validate=True,
version_id = None
generation = None
- # Manually parse URI components instead of using urlparse.urlparse because
+ # Manually parse URI components instead of using urlparse because
# what we're calling URIs don't really fit the standard syntax for URIs
# (the latter includes an optional host/net location part).
end_scheme_idx = uri_str.find('://')
diff --git a/awx/lib/site-packages/boto/auth.py b/awx/lib/site-packages/boto/auth.py
index 62446eb37d..6012962ae9 100644
--- a/awx/lib/site-packages/boto/auth.py
+++ b/awx/lib/site-packages/boto/auth.py
@@ -39,10 +39,9 @@ import hmac
import os
import sys
import time
-import urllib
-import urlparse
import posixpath
+from boto.compat import urllib, encodebytes
from boto.auth_handler import AuthHandler
from boto.exception import BotoClientError
@@ -65,9 +64,10 @@ class HmacKeys(object):
def update_provider(self, provider):
self._provider = provider
- self._hmac = hmac.new(self._provider.secret_key, digestmod=sha)
+ self._hmac = hmac.new(self._provider.secret_key.encode('utf-8'),
+ digestmod=sha)
if sha256:
- self._hmac_256 = hmac.new(self._provider.secret_key,
+ self._hmac_256 = hmac.new(self._provider.secret_key.encode('utf-8'),
digestmod=sha256)
else:
self._hmac_256 = None
@@ -83,13 +83,13 @@ class HmacKeys(object):
digestmod = sha256
else:
digestmod = sha
- return hmac.new(self._provider.secret_key,
+ return hmac.new(self._provider.secret_key.encode('utf-8'),
digestmod=digestmod)
def sign_string(self, string_to_sign):
new_hmac = self._get_hmac()
- new_hmac.update(string_to_sign)
- return base64.encodestring(new_hmac.digest()).strip()
+ new_hmac.update(string_to_sign.encode('utf-8'))
+ return encodebytes(new_hmac.digest()).decode('utf-8').strip()
def __getstate__(self):
pickled_dict = copy.copy(self.__dict__)
@@ -271,7 +271,7 @@ class HmacAuthV3HTTPHandler(AuthHandler, HmacKeys):
req.headers['X-Amz-Security-Token'] = self._provider.security_token
string_to_sign, headers_to_sign = self.string_to_sign(req)
boto.log.debug('StringToSign:\n%s' % string_to_sign)
- hash_value = sha256(string_to_sign).digest()
+ hash_value = sha256(string_to_sign.encode('utf-8')).digest()
b64_hmac = self.sign_string(hash_value)
s = "AWS3 AWSAccessKeyId=%s," % self._provider.access_key
s += "Algorithm=%s," % self.algorithm()
@@ -298,6 +298,9 @@ class HmacAuthV4Handler(AuthHandler, HmacKeys):
self.region_name = region_name
def _sign(self, key, msg, hex=False):
+ if not isinstance(key, bytes):
+ key = key.encode('utf-8')
+
if hex:
sig = hmac.new(key, msg.encode('utf-8'), sha256).hexdigest()
else:
@@ -310,7 +313,6 @@ class HmacAuthV4Handler(AuthHandler, HmacKeys):
in the StringToSign.
"""
host_header_value = self.host_header(self.host, http_request)
- headers_to_sign = {}
headers_to_sign = {'Host': host_header_value}
for name, value in http_request.headers.items():
lname = name.lower()
@@ -330,8 +332,8 @@ class HmacAuthV4Handler(AuthHandler, HmacKeys):
pairs = []
for pname in parameter_names:
pval = boto.utils.get_utf8_value(http_request.params[pname])
- pairs.append(urllib.quote(pname, safe='') + '=' +
- urllib.quote(pval, safe='-_~'))
+ pairs.append(urllib.parse.quote(pname, safe='') + '=' +
+ urllib.parse.quote(pval, safe='-_~'))
return '&'.join(pairs)
def canonical_query_string(self, http_request):
@@ -342,8 +344,8 @@ class HmacAuthV4Handler(AuthHandler, HmacKeys):
l = []
for param in sorted(http_request.params):
value = boto.utils.get_utf8_value(http_request.params[param])
- l.append('%s=%s' % (urllib.quote(param, safe='-_.~'),
- urllib.quote(value, safe='-_.~')))
+ l.append('%s=%s' % (urllib.parse.quote(param, safe='-_.~'),
+ urllib.parse.quote(value.decode('utf-8'), safe='-_.~')))
return '&'.join(l)
def canonical_headers(self, headers_to_sign):
@@ -376,7 +378,7 @@ class HmacAuthV4Handler(AuthHandler, HmacKeys):
# in windows normpath('/') will be '\\' so we chane it back to '/'
normalized = posixpath.normpath(path).replace('\\','/')
# Then urlencode whatever's left.
- encoded = urllib.quote(normalized)
+ encoded = urllib.parse.quote(normalized)
if len(path) > 1 and path.endswith('/'):
encoded += '/'
return encoded
@@ -388,7 +390,9 @@ class HmacAuthV4Handler(AuthHandler, HmacKeys):
# the entire body into memory.
if hasattr(body, 'seek') and hasattr(body, 'read'):
return boto.utils.compute_hash(body, hash_algorithm=sha256)[0]
- return sha256(http_request.body).hexdigest()
+ elif not isinstance(body, bytes):
+ body = body.encode('utf-8')
+ return sha256(body).hexdigest()
def canonical_request(self, http_request):
cr = [http_request.method.upper()]
@@ -462,7 +466,7 @@ class HmacAuthV4Handler(AuthHandler, HmacKeys):
sts = ['AWS4-HMAC-SHA256']
sts.append(http_request.headers['X-Amz-Date'])
sts.append(self.credential_scope(http_request))
- sts.append(sha256(canonical_request).hexdigest())
+ sts.append(sha256(canonical_request.encode('utf-8')).hexdigest())
return '\n'.join(sts)
def signature(self, http_request, string_to_sign):
@@ -538,11 +542,11 @@ class S3HmacAuthV4Handler(HmacAuthV4Handler, AuthHandler):
def canonical_uri(self, http_request):
# S3 does **NOT** do path normalization that SigV4 typically does.
# Urlencode the path, **NOT** ``auth_path`` (because vhosting).
- path = urlparse.urlparse(http_request.path)
+ path = urllib.parse.urlparse(http_request.path)
# Because some quoting may have already been applied, let's back it out.
- unquoted = urllib.unquote(path.path)
+ unquoted = urllib.parse.unquote(path.path)
# Requote, this time addressing all characters.
- encoded = urllib.quote(unquoted)
+ encoded = urllib.parse.quote(unquoted)
return encoded
def host_header(self, host, http_request):
@@ -558,7 +562,6 @@ class S3HmacAuthV4Handler(HmacAuthV4Handler, AuthHandler):
in the StringToSign.
"""
host_header_value = self.host_header(self.host, http_request)
- headers_to_sign = {}
headers_to_sign = {'Host': host_header_value}
for name, value in http_request.headers.items():
lname = name.lower()
@@ -602,6 +605,11 @@ class S3HmacAuthV4Handler(HmacAuthV4Handler, AuthHandler):
if part == 's3':
# If it's by itself, the region is the previous part.
region_name = parts[-offset]
+
+ # Unless it's Vhosted classic
+ if region_name == 'amazonaws':
+ region_name = 'us-east-1'
+
break
elif part.startswith('s3-'):
region_name = self.clean_region_name(part)
@@ -628,14 +636,14 @@ class S3HmacAuthV4Handler(HmacAuthV4Handler, AuthHandler):
# **ON** the ``path/auth_path``.
# Rip them apart, so the ``auth_path/params`` can be signed
# appropriately.
- parsed_path = urlparse.urlparse(modified_req.auth_path)
+ parsed_path = urllib.parse.urlparse(modified_req.auth_path)
modified_req.auth_path = parsed_path.path
if modified_req.params is None:
modified_req.params = {}
raw_qs = parsed_path.query
- existing_qs = urlparse.parse_qs(
+ existing_qs = urllib.parse.parse_qs(
raw_qs,
keep_blank_values=True
)
@@ -666,6 +674,54 @@ class S3HmacAuthV4Handler(HmacAuthV4Handler, AuthHandler):
req = self.mangle_path_and_params(req)
return super(S3HmacAuthV4Handler, self).add_auth(req, **kwargs)
+ def presign(self, req, expires, iso_date=None):
+ """
+ Presign a request using SigV4 query params. Takes in an HTTP request
+ and an expiration time in seconds and returns a URL.
+
+ http://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html
+ """
+ if iso_date is None:
+ iso_date = datetime.datetime.utcnow().strftime('%Y%m%dT%H%M%SZ')
+
+ region = self.determine_region_name(req.host)
+ service = self.determine_service_name(req.host)
+
+ params = {
+ 'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
+ 'X-Amz-Credential': '%s/%s/%s/%s/aws4_request' % (
+ self._provider.access_key,
+ iso_date[:8],
+ region,
+ service
+ ),
+ 'X-Amz-Date': iso_date,
+ 'X-Amz-Expires': expires,
+ 'X-Amz-SignedHeaders': 'host'
+ }
+
+ if self._provider.security_token:
+ params['X-Amz-Security-Token'] = self._provider.security_token
+
+ req.params.update(params)
+
+ cr = self.canonical_request(req)
+
+ # We need to replace the payload SHA with a constant
+ cr = '\n'.join(cr.split('\n')[:-1]) + '\nUNSIGNED-PAYLOAD'
+
+ # Date header is expected for string_to_sign, but unused otherwise
+ req.headers['X-Amz-Date'] = iso_date
+
+ sts = self.string_to_sign(req, cr)
+ signature = self.signature(req, sts)
+
+ # Add signature to params now that we have it
+ req.params['X-Amz-Signature'] = signature
+
+ return 'https://%s%s?%s' % (req.host, req.path,
+ urllib.parse.urlencode(req.params))
+
class QueryAuthHandler(AuthHandler):
"""
@@ -679,16 +735,16 @@ class QueryAuthHandler(AuthHandler):
capability = ['pure-query']
def _escape_value(self, value):
- # Would normally be ``return urllib.quote(value)``.
+ # Would normally be ``return urllib.parse.quote(value)``.
return value
def _build_query_string(self, params):
- keys = params.keys()
- keys.sort(cmp=lambda x, y: cmp(x.lower(), y.lower()))
+ keys = list(params.keys())
+ keys.sort(key=lambda x: x.lower())
pairs = []
for key in keys:
val = boto.utils.get_utf8_value(params[key])
- pairs.append(key + '=' + self._escape_value(val))
+ pairs.append(key + '=' + self._escape_value(val.decode('utf-8')))
return '&'.join(pairs)
def add_auth(self, http_request, **kwargs):
@@ -725,7 +781,7 @@ class QuerySignatureHelper(HmacKeys):
boto.log.debug('query_string: %s Signature: %s' % (qs, signature))
if http_request.method == 'POST':
headers['Content-Type'] = 'application/x-www-form-urlencoded; charset=UTF-8'
- http_request.body = qs + '&Signature=' + urllib.quote_plus(signature)
+ http_request.body = qs + '&Signature=' + urllib.parse.quote_plus(signature)
http_request.headers['Content-Length'] = str(len(http_request.body))
else:
http_request.body = ''
@@ -733,7 +789,7 @@ class QuerySignatureHelper(HmacKeys):
# already be there, we need to get rid of that and rebuild it
http_request.path = http_request.path.split('?')[0]
http_request.path = (http_request.path + '?' + qs +
- '&Signature=' + urllib.quote_plus(signature))
+ '&Signature=' + urllib.parse.quote_plus(signature))
class QuerySignatureV0AuthHandler(QuerySignatureHelper, AuthHandler):
@@ -746,13 +802,13 @@ class QuerySignatureV0AuthHandler(QuerySignatureHelper, AuthHandler):
boto.log.debug('using _calc_signature_0')
hmac = self._get_hmac()
s = params['Action'] + params['Timestamp']
- hmac.update(s)
+ hmac.update(s.encode('utf-8'))
keys = params.keys()
keys.sort(cmp=lambda x, y: cmp(x.lower(), y.lower()))
pairs = []
for key in keys:
val = boto.utils.get_utf8_value(params[key])
- pairs.append(key + '=' + urllib.quote(val))
+ pairs.append(key + '=' + urllib.parse.quote(val))
qs = '&'.join(pairs)
return (qs, base64.b64encode(hmac.digest()))
@@ -777,10 +833,10 @@ class QuerySignatureV1AuthHandler(QuerySignatureHelper, AuthHandler):
keys.sort(cmp=lambda x, y: cmp(x.lower(), y.lower()))
pairs = []
for key in keys:
- hmac.update(key)
+ hmac.update(key.encode('utf-8'))
val = boto.utils.get_utf8_value(params[key])
hmac.update(val)
- pairs.append(key + '=' + urllib.quote(val))
+ pairs.append(key + '=' + urllib.parse.quote(val))
qs = '&'.join(pairs)
return (qs, base64.b64encode(hmac.digest()))
@@ -803,13 +859,13 @@ class QuerySignatureV2AuthHandler(QuerySignatureHelper, AuthHandler):
pairs = []
for key in keys:
val = boto.utils.get_utf8_value(params[key])
- pairs.append(urllib.quote(key, safe='') + '=' +
- urllib.quote(val, safe='-_~'))
+ pairs.append(urllib.parse.quote(key, safe='') + '=' +
+ urllib.parse.quote(val, safe='-_~'))
qs = '&'.join(pairs)
boto.log.debug('query string: %s' % qs)
string_to_sign += qs
boto.log.debug('string_to_sign: %s' % string_to_sign)
- hmac.update(string_to_sign)
+ hmac.update(string_to_sign.encode('utf-8'))
b64 = base64.b64encode(hmac.digest())
boto.log.debug('len(b64)=%d' % len(b64))
boto.log.debug('base64 encoded digest: %s' % b64)
@@ -841,7 +897,7 @@ class POSTPathQSV2AuthHandler(QuerySignatureV2AuthHandler, AuthHandler):
# already be there, we need to get rid of that and rebuild it
req.path = req.path.split('?')[0]
req.path = (req.path + '?' + qs +
- '&Signature=' + urllib.quote_plus(signature))
+ '&Signature=' + urllib.parse.quote_plus(signature))
def get_auth_handler(host, config, provider, requested_capability=None):
@@ -904,6 +960,9 @@ def detect_potential_sigv4(func):
return ['hmac-v4']
if hasattr(self, 'region'):
+ # If you're making changes here, you should also check
+ # ``boto/iam/connection.py``, as several things there are also
+ # endpoint-related.
if getattr(self.region, 'endpoint', ''):
if '.cn-' in self.region.endpoint:
return ['hmac-v4']
@@ -921,6 +980,9 @@ def detect_potential_s3sigv4(func):
return ['hmac-v4-s3']
if hasattr(self, 'host'):
+ # If you're making changes here, you should also check
+ # ``boto/iam/connection.py``, as several things there are also
+ # endpoint-related.
if '.cn-' in self.host:
return ['hmac-v4-s3']
diff --git a/awx/lib/site-packages/boto/auth_handler.py b/awx/lib/site-packages/boto/auth_handler.py
index ab2d317033..e6d131aff0 100644
--- a/awx/lib/site-packages/boto/auth_handler.py
+++ b/awx/lib/site-packages/boto/auth_handler.py
@@ -23,7 +23,7 @@
Defines an interface which all Auth handlers need to implement.
"""
-from plugin import Plugin
+from boto.plugin import Plugin
class NotReadyToAuthenticate(Exception):
pass
diff --git a/awx/lib/site-packages/boto/beanstalk/exception.py b/awx/lib/site-packages/boto/beanstalk/exception.py
index f6f9ffad55..0fbd4ab9fa 100644
--- a/awx/lib/site-packages/boto/beanstalk/exception.py
+++ b/awx/lib/site-packages/boto/beanstalk/exception.py
@@ -4,12 +4,14 @@ from boto.exception import BotoServerError
def simple(e):
- err = json.loads(e.error_message)
- code = err['Error']['Code']
+ code = e.code
+
+ if code.endswith('Exception'):
+ code = code.rstrip('Exception')
try:
# Dynamically get the error class.
- simple_e = getattr(sys.modules[__name__], code)(e, err)
+ simple_e = getattr(sys.modules[__name__], code)(e)
except AttributeError:
# Return original exception on failure.
return e
@@ -18,12 +20,9 @@ def simple(e):
class SimpleException(BotoServerError):
- def __init__(self, e, err):
+ def __init__(self, e):
super(SimpleException, self).__init__(e.status, e.reason, e.body)
- self.body = e.error_message
- self.request_id = err['RequestId']
- self.error_code = err['Error']['Code']
- self.error_message = err['Error']['Message']
+ self.error_message = self.message
def __repr__(self):
return self.__class__.__name__ + ': ' + self.error_message
diff --git a/awx/lib/site-packages/boto/beanstalk/layer1.py b/awx/lib/site-packages/boto/beanstalk/layer1.py
index 5963f50e9c..e72ee23ec5 100644
--- a/awx/lib/site-packages/boto/beanstalk/layer1.py
+++ b/awx/lib/site-packages/boto/beanstalk/layer1.py
@@ -63,7 +63,7 @@ class Layer1(AWSQueryConnection):
def _get_response(self, action, params, path='/', verb='GET'):
params['ContentType'] = 'JSON'
response = self.make_request(action, params, path, verb)
- body = response.read()
+ body = response.read().decode('utf-8')
boto.log.debug(body)
if response.status == 200:
return json.loads(body)
@@ -351,9 +351,9 @@ class Layer1(AWSQueryConnection):
self.build_list_params(params, options_to_remove,
'OptionsToRemove.member')
if tier_name and tier_type and tier_version:
- params['Tier.member.Name'] = tier_name
- params['Tier.member.Type'] = tier_type
- params['Tier.member.Version'] = tier_version
+ params['Tier.Name'] = tier_name
+ params['Tier.Type'] = tier_type
+ params['Tier.Version'] = tier_version
return self._get_response('CreateEnvironment', params)
def create_storage_location(self):
@@ -1138,9 +1138,9 @@ class Layer1(AWSQueryConnection):
self.build_list_params(params, options_to_remove,
'OptionsToRemove.member')
if tier_name and tier_type and tier_version:
- params['Tier.member.Name'] = tier_name
- params['Tier.member.Type'] = tier_type
- params['Tier.member.Version'] = tier_version
+ params['Tier.Name'] = tier_name
+ params['Tier.Type'] = tier_type
+ params['Tier.Version'] = tier_version
return self._get_response('UpdateEnvironment', params)
def validate_configuration_settings(self, application_name,
diff --git a/awx/lib/site-packages/boto/beanstalk/response.py b/awx/lib/site-packages/boto/beanstalk/response.py
index 2d071bc91e..8128ba1fed 100644
--- a/awx/lib/site-packages/boto/beanstalk/response.py
+++ b/awx/lib/site-packages/boto/beanstalk/response.py
@@ -1,5 +1,6 @@
"""Classify responses from layer1 and strict type values."""
from datetime import datetime
+from boto.compat import six
class BaseObject(object):
@@ -7,7 +8,7 @@ class BaseObject(object):
def __repr__(self):
result = self.__class__.__name__ + '{ '
counter = 0
- for key, value in self.__dict__.iteritems():
+ for key, value in six.iteritems(self.__dict__):
# first iteration no comma
counter += 1
if counter > 1:
diff --git a/awx/lib/site-packages/boto/beanstalk/wrapper.py b/awx/lib/site-packages/boto/beanstalk/wrapper.py
index aa9a7d279b..eea1124a9e 100644
--- a/awx/lib/site-packages/boto/beanstalk/wrapper.py
+++ b/awx/lib/site-packages/boto/beanstalk/wrapper.py
@@ -9,7 +9,7 @@ def beanstalk_wrapper(func, name):
def _wrapped_low_level_api(*args, **kwargs):
try:
response = func(*args, **kwargs)
- except BotoServerError, e:
+ except BotoServerError as e:
raise exception.simple(e)
# Turn 'this_is_a_function_name' into 'ThisIsAFunctionNameResponse'.
cls_name = ''.join([part.capitalize() for part in name.split('_')]) + 'Response'
diff --git a/awx/lib/site-packages/boto/cloudformation/__init__.py b/awx/lib/site-packages/boto/cloudformation/__init__.py
index 84047e2b0b..d602dc39e8 100644
--- a/awx/lib/site-packages/boto/cloudformation/__init__.py
+++ b/awx/lib/site-packages/boto/cloudformation/__init__.py
@@ -20,7 +20,7 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-from connection import CloudFormationConnection
+from boto.cloudformation.connection import CloudFormationConnection
from boto.regioninfo import RegionInfo, get_regions, load_regions
RegionData = load_regions().get('cloudformation')
diff --git a/awx/lib/site-packages/boto/cloudformation/connection.py b/awx/lib/site-packages/boto/cloudformation/connection.py
index 40ff8b63bd..6c31ac53b3 100644
--- a/awx/lib/site-packages/boto/cloudformation/connection.py
+++ b/awx/lib/site-packages/boto/cloudformation/connection.py
@@ -271,7 +271,7 @@ class CloudFormationConnection(AWSQueryConnection):
:return: Parsed JSON response data
"""
response = self.make_request(call, params, path, method)
- body = response.read()
+ body = response.read().decode('utf-8')
if response.status == 200:
body = json.loads(body)
return body
diff --git a/awx/lib/site-packages/boto/cloudfront/distribution.py b/awx/lib/site-packages/boto/cloudfront/distribution.py
index 5566bdfec2..7131c86c83 100644
--- a/awx/lib/site-packages/boto/cloudfront/distribution.py
+++ b/awx/lib/site-packages/boto/cloudfront/distribution.py
@@ -22,7 +22,7 @@
import uuid
import base64
import time
-from boto.compat import json
+from boto.compat import six, json
from boto.cloudfront.identity import OriginAccessIdentity
from boto.cloudfront.object import Object, StreamingObject
from boto.cloudfront.signers import ActiveTrustedSigners, TrustedSigners
@@ -665,7 +665,7 @@ class Distribution(object):
raise ValueError("You must specify one of private_key_file or private_key_string")
# If private_key_file is a file name, open it and read it
if private_key_string is None:
- if isinstance(private_key_file, basestring):
+ if isinstance(private_key_file, six.string_types):
with open(private_key_file, 'r') as file_handle:
private_key_string = file_handle.read()
# Otherwise, treat it like a file
diff --git a/awx/lib/site-packages/boto/cloudfront/invalidation.py b/awx/lib/site-packages/boto/cloudfront/invalidation.py
index 91ba89d9ee..58adf81fd3 100644
--- a/awx/lib/site-packages/boto/cloudfront/invalidation.py
+++ b/awx/lib/site-packages/boto/cloudfront/invalidation.py
@@ -20,8 +20,8 @@
# IN THE SOFTWARE.
import uuid
-import urllib
+from boto.compat import urllib
from boto.resultset import ResultSet
@@ -71,7 +71,7 @@ class InvalidationBatch(object):
"""Escape a path, make sure it begins with a slash and contains no invalid characters"""
if not p[0] == "/":
p = "/%s" % p
- return urllib.quote(p)
+ return urllib.parse.quote(p)
def to_xml(self):
"""Get this batch as XML"""
diff --git a/awx/lib/site-packages/boto/cloudfront/origin.py b/awx/lib/site-packages/boto/cloudfront/origin.py
index 57af846ef9..b88ec7e7f8 100644
--- a/awx/lib/site-packages/boto/cloudfront/origin.py
+++ b/awx/lib/site-packages/boto/cloudfront/origin.py
@@ -20,7 +20,7 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-from identity import OriginAccessIdentity
+from boto.cloudfront.identity import OriginAccessIdentity
def get_oai_value(origin_access_identity):
if isinstance(origin_access_identity, OriginAccessIdentity):
diff --git a/awx/lib/site-packages/boto/cloudsearch/document.py b/awx/lib/site-packages/boto/cloudsearch/document.py
index 1f17026fbb..0a1d9db22c 100644
--- a/awx/lib/site-packages/boto/cloudsearch/document.py
+++ b/awx/lib/site-packages/boto/cloudsearch/document.py
@@ -221,13 +221,15 @@ class CommitResponse(object):
self.doc_service = doc_service
self.sdf = sdf
+ _body = response.content.decode('utf-8')
+
try:
- self.content = json.loads(response.content)
+ self.content = json.loads(_body)
except:
boto.log.error('Error indexing documents.\nResponse Content:\n{0}\n\n'
- 'SDF:\n{1}'.format(response.content, self.sdf))
+ 'SDF:\n{1}'.format(_body, self.sdf))
raise boto.exception.BotoServerError(self.response.status_code, '',
- body=response.content)
+ body=_body)
self.status = self.content['status']
if self.status == 'error':
@@ -238,6 +240,9 @@ class CommitResponse(object):
raise EncodingError("Illegal Unicode character in document")
elif e == "The Content-Length is too long":
raise ContentTooLongError("Content was too long")
+ if 'adds' not in self.content or 'deletes' not in self.content:
+ raise SearchServiceException("Error indexing documents"
+ " => %s" % self.content.get('message', ''))
else:
self.errors = []
diff --git a/awx/lib/site-packages/boto/cloudsearch/domain.py b/awx/lib/site-packages/boto/cloudsearch/domain.py
index 9497325c76..9800b17512 100644
--- a/awx/lib/site-packages/boto/cloudsearch/domain.py
+++ b/awx/lib/site-packages/boto/cloudsearch/domain.py
@@ -24,19 +24,19 @@
import boto
from boto.compat import json
-from .optionstatus import OptionStatus
-from .optionstatus import IndexFieldStatus
-from .optionstatus import ServicePoliciesStatus
-from .optionstatus import RankExpressionStatus
-from .document import DocumentServiceConnection
-from .search import SearchConnection
+from boto.cloudsearch.optionstatus import OptionStatus
+from boto.cloudsearch.optionstatus import IndexFieldStatus
+from boto.cloudsearch.optionstatus import ServicePoliciesStatus
+from boto.cloudsearch.optionstatus import RankExpressionStatus
+from boto.cloudsearch.document import DocumentServiceConnection
+from boto.cloudsearch.search import SearchConnection
def handle_bool(value):
if value in [True, 'true', 'True', 'TRUE', 1]:
return True
return False
-
+
class Domain(object):
"""
A Cloudsearch domain.
@@ -118,7 +118,7 @@ class Domain(object):
@created.setter
def created(self, value):
self._created = handle_bool(value)
-
+
@property
def deleted(self):
return self._deleted
@@ -126,7 +126,7 @@ class Domain(object):
@deleted.setter
def deleted(self, value):
self._deleted = handle_bool(value)
-
+
@property
def processing(self):
return self._processing
@@ -134,7 +134,7 @@ class Domain(object):
@processing.setter
def processing(self, value):
self._processing = handle_bool(value)
-
+
@property
def requires_index_documents(self):
return self._requires_index_documents
@@ -142,7 +142,7 @@ class Domain(object):
@requires_index_documents.setter
def requires_index_documents(self, value):
self._requires_index_documents = handle_bool(value)
-
+
@property
def search_partition_count(self):
return self._search_partition_count
@@ -150,7 +150,7 @@ class Domain(object):
@search_partition_count.setter
def search_partition_count(self, value):
self._search_partition_count = int(value)
-
+
@property
def search_instance_count(self):
return self._search_instance_count
@@ -158,7 +158,7 @@ class Domain(object):
@search_instance_count.setter
def search_instance_count(self, value):
self._search_instance_count = int(value)
-
+
@property
def num_searchable_docs(self):
return self._num_searchable_docs
@@ -166,7 +166,7 @@ class Domain(object):
@num_searchable_docs.setter
def num_searchable_docs(self, value):
self._num_searchable_docs = int(value)
-
+
@property
def name(self):
return self.domain_name
diff --git a/awx/lib/site-packages/boto/cloudsearch/layer1.py b/awx/lib/site-packages/boto/cloudsearch/layer1.py
index 92ebe08207..69132e39ce 100644
--- a/awx/lib/site-packages/boto/cloudsearch/layer1.py
+++ b/awx/lib/site-packages/boto/cloudsearch/layer1.py
@@ -680,7 +680,7 @@ class Layer1(AWSQueryConnection):
'update_stemming_options_result',
'stems')
params = {'DomainName': domain_name,
- 'Stems': stems}
+ 'Stems': stems}
return self.get_response(doc_path, 'UpdateStemmingOptions',
params, verb='POST')
diff --git a/awx/lib/site-packages/boto/cloudsearch/layer2.py b/awx/lib/site-packages/boto/cloudsearch/layer2.py
index 4189b5bb1e..b565d4b5b9 100644
--- a/awx/lib/site-packages/boto/cloudsearch/layer2.py
+++ b/awx/lib/site-packages/boto/cloudsearch/layer2.py
@@ -22,8 +22,8 @@
# IN THE SOFTWARE.
#
-from .layer1 import Layer1
-from .domain import Domain
+from boto.cloudsearch.layer1 import Layer1
+from boto.cloudsearch.domain import Domain
class Layer2(object):
diff --git a/awx/lib/site-packages/boto/cloudsearch/search.py b/awx/lib/site-packages/boto/cloudsearch/search.py
index a103993ebb..70ea479bec 100644
--- a/awx/lib/site-packages/boto/cloudsearch/search.py
+++ b/awx/lib/site-packages/boto/cloudsearch/search.py
@@ -22,9 +22,7 @@
# IN THE SOFTWARE.
#
from math import ceil
-import time
-import boto
-from boto.compat import json
+from boto.compat import json, map, six
import requests
@@ -52,7 +50,7 @@ class SearchResults(object):
self.facets = {}
if 'facets' in attrs:
- for (facet, values) in attrs['facets'].iteritems():
+ for (facet, values) in attrs['facets'].items():
if 'constraints' in values:
self.facets[facet] = dict((k, v) for (k, v) in map(lambda x: (x['value'], x['count']), values['constraints']))
@@ -129,19 +127,19 @@ class Query(object):
params['facet'] = ','.join(self.facet)
if self.facet_constraints:
- for k, v in self.facet_constraints.iteritems():
+ for k, v in six.iteritems(self.facet_constraints):
params['facet-%s-constraints' % k] = v
if self.facet_sort:
- for k, v in self.facet_sort.iteritems():
+ for k, v in six.iteritems(self.facet_sort):
params['facet-%s-sort' % k] = v
if self.facet_top_n:
- for k, v in self.facet_top_n.iteritems():
+ for k, v in six.iteritems(self.facet_top_n):
params['facet-%s-top-n' % k] = v
if self.t:
- for k, v in self.t.iteritems():
+ for k, v in six.iteritems(self.t):
params['t-%s' % k] = v
return params
@@ -288,19 +286,20 @@ class SearchConnection(object):
params = query.to_params()
r = requests.get(url, params=params)
+ body = r.content.decode('utf-8')
try:
- data = json.loads(r.content)
- except ValueError, e:
+ data = json.loads(body)
+ except ValueError as e:
if r.status_code == 403:
msg = ''
import re
-            g = re.search('<html><body><h1>403 Forbidden</h1>([^<]+)<', r.content)
+            g = re.search('<html><body><h1>403 Forbidden</h1>([^<]+)<', body)
try:
msg = ': %s' % (g.groups()[0].strip())
except AttributeError:
pass
raise SearchServiceException('Authentication error from Amazon%s' % msg)
- raise SearchServiceException("Got non-json response from Amazon. %s" % r.content, query)
+ raise SearchServiceException("Got non-json response from Amazon. %s" % body, query)
if 'messages' in data and 'error' in data:
for m in data['messages']:
diff --git a/awx/lib/site-packages/boto/cloudsearch/sourceattribute.py b/awx/lib/site-packages/boto/cloudsearch/sourceattribute.py
index c3435079de..2883314722 100644
--- a/awx/lib/site-packages/boto/cloudsearch/sourceattribute.py
+++ b/awx/lib/site-packages/boto/cloudsearch/sourceattribute.py
@@ -72,4 +72,3 @@ class SourceAttribute(object):
valid = '|'.join(self.ValidDataFunctions)
raise ValueError('data_function must be one of: %s' % valid)
self._data_function = value
-
diff --git a/awx/lib/site-packages/boto/cloudsearch2/__init__.py b/awx/lib/site-packages/boto/cloudsearch2/__init__.py
new file mode 100644
index 0000000000..d14c917935
--- /dev/null
+++ b/awx/lib/site-packages/boto/cloudsearch2/__init__.py
@@ -0,0 +1,42 @@
+# Copyright (c) 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+from boto.regioninfo import get_regions
+
+
+def regions():
+ """
+ Get all available regions for the Amazon CloudSearch service.
+
+ :rtype: list
+ :return: A list of :class:`boto.regioninfo.RegionInfo`
+ """
+ import boto.cloudsearch2.layer1
+ return get_regions(
+ 'cloudsearch',
+ connection_cls=boto.cloudsearch2.layer1.CloudSearchConnection
+ )
+
+
+def connect_to_region(region_name, **kw_params):
+ for region in regions():
+ if region.name == region_name:
+ return region.connect(**kw_params)
+ return None
diff --git a/awx/lib/site-packages/boto/cloudsearch2/document.py b/awx/lib/site-packages/boto/cloudsearch2/document.py
new file mode 100644
index 0000000000..3244a47ad0
--- /dev/null
+++ b/awx/lib/site-packages/boto/cloudsearch2/document.py
@@ -0,0 +1,275 @@
+# Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
+# Copyright (c) 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+
+import boto.exception
+from boto.compat import json
+import requests
+import boto
+
+
+class SearchServiceException(Exception):
+ pass
+
+
+class CommitMismatchError(Exception):
+ # Let's do some extra work and let the user handle errors on his/her own.
+
+ errors = None
+
+
+class EncodingError(Exception):
+ """
+ Content sent for Cloud Search indexing was incorrectly encoded.
+
+ This usually happens when a document is marked as unicode but non-unicode
+ characters are present.
+ """
+ pass
+
+
+class ContentTooLongError(Exception):
+ """
+ Content sent for Cloud Search indexing was too long
+
+ This will usually happen when documents queued for indexing add up to more
+ than the limit allowed per upload batch (5MB)
+
+ """
+ pass
+
+
+class DocumentServiceConnection(object):
+ """
+ A CloudSearch document service.
+
+ The DocumentServiceConection is used to add, remove and update documents in
+ CloudSearch. Commands are uploaded to CloudSearch in SDF (Search Document
+ Format).
+
+ To generate an appropriate SDF, use :func:`add` to add or update documents,
+ as well as :func:`delete` to remove documents.
+
+ Once the set of documents is ready to be index, use :func:`commit` to send
+ the commands to CloudSearch.
+
+ If there are a lot of documents to index, it may be preferable to split the
+ generation of SDF data and the actual uploading into CloudSearch. Retrieve
+ the current SDF with :func:`get_sdf`. If this file is the uploaded into S3,
+ it can be retrieved back afterwards for upload into CloudSearch using
+ :func:`add_sdf_from_s3`.
+
+ The SDF is not cleared after a :func:`commit`. If you wish to continue
+ using the DocumentServiceConnection for another batch upload of commands,
+ you will need to :func:`clear_sdf` first to stop the previous batch of
+ commands from being uploaded again.
+
+ """
+
+ def __init__(self, domain=None, endpoint=None):
+ self.domain = domain
+ self.endpoint = endpoint
+ if not self.endpoint:
+ self.endpoint = domain.doc_service_endpoint
+ self.documents_batch = []
+ self._sdf = None
+
+ def add(self, _id, fields):
+ """
+ Add a document to be processed by the DocumentService
+
+ The document will not actually be added until :func:`commit` is called
+
+ :type _id: string
+ :param _id: A unique ID used to refer to this document.
+
+ :type fields: dict
+ :param fields: A dictionary of key-value pairs to be uploaded .
+ """
+
+ d = {'type': 'add', 'id': _id, 'fields': fields}
+ self.documents_batch.append(d)
+
+ def delete(self, _id):
+ """
+ Schedule a document to be removed from the CloudSearch service
+
+ The document will not actually be scheduled for removal until
+ :func:`commit` is called
+
+ :type _id: string
+ :param _id: The unique ID of this document.
+ """
+
+ d = {'type': 'delete', 'id': _id}
+ self.documents_batch.append(d)
+
+ def get_sdf(self):
+ """
+ Generate the working set of documents in Search Data Format (SDF)
+
+ :rtype: string
+ :returns: JSON-formatted string of the documents in SDF
+ """
+
+ return self._sdf if self._sdf else json.dumps(self.documents_batch)
+
+ def clear_sdf(self):
+ """
+ Clear the working documents from this DocumentServiceConnection
+
+ This should be used after :func:`commit` if the connection will be
+ reused for another set of documents.
+ """
+
+ self._sdf = None
+ self.documents_batch = []
+
+ def add_sdf_from_s3(self, key_obj):
+ """
+ Load an SDF from S3
+
+ Using this method will result in documents added through
+ :func:`add` and :func:`delete` being ignored.
+
+ :type key_obj: :class:`boto.s3.key.Key`
+ :param key_obj: An S3 key which contains an SDF
+ """
+ #@todo:: (lucas) would be nice if this could just take an s3://uri..."
+
+ self._sdf = key_obj.get_contents_as_string()
+
+ def commit(self):
+ """
+ Actually send an SDF to CloudSearch for processing
+
+ If an SDF file has been explicitly loaded it will be used. Otherwise,
+ documents added through :func:`add` and :func:`delete` will be used.
+
+ :rtype: :class:`CommitResponse`
+ :returns: A summary of documents added and deleted
+ """
+
+ sdf = self.get_sdf()
+
+ if ': null' in sdf:
+ boto.log.error('null value in sdf detected. This will probably '
+ 'raise 500 error.')
+ index = sdf.index(': null')
+ boto.log.error(sdf[index - 100:index + 100])
+
+ api_version = '2013-01-01'
+ if self.domain:
+ api_version = self.domain.layer1.APIVersion
+ url = "http://%s/%s/documents/batch" % (self.endpoint, api_version)
+
+ # Keep-alive is automatic in a post-1.0 requests world.
+ session = requests.Session()
+ adapter = requests.adapters.HTTPAdapter(
+ pool_connections=20,
+ pool_maxsize=50,
+ max_retries=5
+ )
+ session.mount('http://', adapter)
+ session.mount('https://', adapter)
+ r = session.post(url, data=sdf,
+ headers={'Content-Type': 'application/json'})
+
+ return CommitResponse(r, self, sdf)
+
+
+class CommitResponse(object):
+ """Wrapper for response to Cloudsearch document batch commit.
+
+ :type response: :class:`requests.models.Response`
+ :param response: Response from Cloudsearch /documents/batch API
+
+ :type doc_service: :class:`boto.cloudsearch2.document.DocumentServiceConnection`
+ :param doc_service: Object containing the documents posted and methods to
+ retry
+
+ :raises: :class:`boto.exception.BotoServerError`
+ :raises: :class:`boto.cloudsearch2.document.SearchServiceException`
+ :raises: :class:`boto.cloudsearch2.document.EncodingError`
+ :raises: :class:`boto.cloudsearch2.document.ContentTooLongError`
+ """
+ def __init__(self, response, doc_service, sdf):
+ self.response = response
+ self.doc_service = doc_service
+ self.sdf = sdf
+
+ _body = response.content.decode('utf-8')
+
+ try:
+ self.content = json.loads(_body)
+ except:
+ boto.log.error('Error indexing documents.\nResponse Content:\n{0}'
+ '\n\nSDF:\n{1}'.format(_body, self.sdf))
+ raise boto.exception.BotoServerError(self.response.status_code, '',
+ body=_body)
+
+ self.status = self.content['status']
+ if self.status == 'error':
+ self.errors = [e.get('message') for e in self.content.get('errors',
+ [])]
+ for e in self.errors:
+ if "Illegal Unicode character" in e:
+ raise EncodingError("Illegal Unicode character in document")
+ elif e == "The Content-Length is too long":
+ raise ContentTooLongError("Content was too long")
+ else:
+ self.errors = []
+
+ self.adds = self.content['adds']
+ self.deletes = self.content['deletes']
+ self._check_num_ops('add', self.adds)
+ self._check_num_ops('delete', self.deletes)
+
+ def _check_num_ops(self, type_, response_num):
+ """Raise exception if number of ops in response doesn't match commit
+
+ :type type_: str
+ :param type_: Type of commit operation: 'add' or 'delete'
+
+ :type response_num: int
+ :param response_num: Number of adds or deletes in the response.
+
+ :raises: :class:`boto.cloudsearch2.document.CommitMismatchError`
+ """
+ commit_num = len([d for d in self.doc_service.documents_batch
+ if d['type'] == type_])
+
+ if response_num != commit_num:
+ boto.log.debug(self.response.content)
+ # There will always be a commit mismatch error if there is any
+ # errors on cloudsearch. self.errors gets lost when this
+ # CommitMismatchError is raised. Whoever is using boto has no idea
+ # why their commit failed. They can't even notify the user of the
+ # cause by parsing the error messages from amazon. So let's
+ # attach the self.errors to the exceptions if we already spent
+ # time and effort collecting them out of the response.
+ exc = CommitMismatchError(
+ 'Incorrect number of {0}s returned. Commit: {1} Response: {2}'
+ .format(type_, commit_num, response_num)
+ )
+ exc.errors = self.errors
+ raise exc
diff --git a/awx/lib/site-packages/boto/cloudsearch2/domain.py b/awx/lib/site-packages/boto/cloudsearch2/domain.py
new file mode 100644
index 0000000000..956af216d8
--- /dev/null
+++ b/awx/lib/site-packages/boto/cloudsearch2/domain.py
@@ -0,0 +1,542 @@
+# Copyright (c) 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+
+from boto.cloudsearch2.optionstatus import IndexFieldStatus
+from boto.cloudsearch2.optionstatus import ServicePoliciesStatus
+from boto.cloudsearch2.optionstatus import ExpressionStatus
+from boto.cloudsearch2.optionstatus import AvailabilityOptionsStatus
+from boto.cloudsearch2.optionstatus import ScalingParametersStatus
+from boto.cloudsearch2.document import DocumentServiceConnection
+from boto.cloudsearch2.search import SearchConnection
+
+
+def handle_bool(value):
+ if value in [True, 'true', 'True', 'TRUE', 1]:
+ return True
+ return False
+
+
+class Domain(object):
+ """
+ A Cloudsearch domain.
+
+ :ivar name: The name of the domain.
+
+ :ivar id: The internally generated unique identifier for the domain.
+
+ :ivar created: A boolean which is True if the domain is
+ created. It can take several minutes to initialize a domain
+ when CreateDomain is called. Newly created search domains are
+ returned with a False value for Created until domain creation
+ is complete
+
+ :ivar deleted: A boolean which is True if the search domain has
+ been deleted. The system must clean up resources dedicated to
+ the search domain when delete is called. Newly deleted
+ search domains are returned from list_domains with a True
+ value for deleted for several minutes until resource cleanup
+ is complete.
+
+ :ivar processing: True if processing is being done to activate the
+ current domain configuration.
+
+ :ivar num_searchable_docs: The number of documents that have been
+ submittted to the domain and indexed.
+
+ :ivar requires_index_document: True if index_documents needs to be
+ called to activate the current domain configuration.
+
+ :ivar search_instance_count: The number of search instances that are
+ available to process search requests.
+
+ :ivar search_instance_type: The instance type that is being used to
+ process search requests.
+
+ :ivar search_partition_count: The number of partitions across which
+ the search index is spread.
+ """
+
+ def __init__(self, layer1, data):
+ """
+ Constructor - Create a domain object from a layer1 and data params
+
+ :type layer1: :class:`boto.cloudsearch2.layer1.Layer1` object
+ :param layer1: A :class:`boto.cloudsearch2.layer1.Layer1` object
+ which is used to perform operations on the domain.
+ """
+ self.layer1 = layer1
+ self.update_from_data(data)
+
+ def update_from_data(self, data):
+ self.created = data['Created']
+ self.deleted = data['Deleted']
+ self.processing = data['Processing']
+ self.requires_index_documents = data['RequiresIndexDocuments']
+ self.domain_id = data['DomainId']
+ self.domain_name = data['DomainName']
+ self.search_instance_count = data['SearchInstanceCount']
+ self.search_instance_type = data.get('SearchInstanceType', None)
+ self.search_partition_count = data['SearchPartitionCount']
+ self._doc_service = data['DocService']
+ self._service_arn = data['ARN']
+ self._search_service = data['SearchService']
+
+ @property
+ def service_arn(self):
+ return self._service_arn
+
+ @property
+ def doc_service_endpoint(self):
+ return self._doc_service['Endpoint']
+
+ @property
+ def search_service_endpoint(self):
+ return self._search_service['Endpoint']
+
+ @property
+ def created(self):
+ return self._created
+
+ @created.setter
+ def created(self, value):
+ self._created = handle_bool(value)
+
+ @property
+ def deleted(self):
+ return self._deleted
+
+ @deleted.setter
+ def deleted(self, value):
+ self._deleted = handle_bool(value)
+
+ @property
+ def processing(self):
+ return self._processing
+
+ @processing.setter
+ def processing(self, value):
+ self._processing = handle_bool(value)
+
+ @property
+ def requires_index_documents(self):
+ return self._requires_index_documents
+
+ @requires_index_documents.setter
+ def requires_index_documents(self, value):
+ self._requires_index_documents = handle_bool(value)
+
+ @property
+ def search_partition_count(self):
+ return self._search_partition_count
+
+ @search_partition_count.setter
+ def search_partition_count(self, value):
+ self._search_partition_count = int(value)
+
+ @property
+ def search_instance_count(self):
+ return self._search_instance_count
+
+ @search_instance_count.setter
+ def search_instance_count(self, value):
+ self._search_instance_count = int(value)
+
+ @property
+ def name(self):
+ return self.domain_name
+
+ @property
+ def id(self):
+ return self.domain_id
+
+ def delete(self):
+ """
+ Delete this domain and all index data associated with it.
+ """
+ return self.layer1.delete_domain(self.name)
+
+ def get_analysis_schemes(self):
+ """
+ Return a list of Analysis Scheme objects.
+ """
+ return self.layer1.describe_analysis_schemes(self.name)
+
+ def get_availability_options(self):
+ """
+ Return a :class:`boto.cloudsearch2.option.AvailabilityOptionsStatus`
+ object representing the currently defined availability options for
+ the domain.
+ :return: OptionsStatus object
+ :rtype: :class:`boto.cloudsearch2.option.AvailabilityOptionsStatus`
+ object
+ """
+ return AvailabilityOptionsStatus(
+ self, refresh_fn=self.layer1.describe_availability_options,
+ refresh_key=['DescribeAvailabilityOptionsResponse',
+ 'DescribeAvailabilityOptionsResult',
+ 'AvailabilityOptions'],
+ save_fn=self.layer1.update_availability_options)
+
+ def get_scaling_options(self):
+ """
+ Return a :class:`boto.cloudsearch2.option.ScalingParametersStatus`
+ object representing the currently defined scaling options for the
+ domain.
+ :return: ScalingParametersStatus object
+ :rtype: :class:`boto.cloudsearch2.option.ScalingParametersStatus`
+ object
+ """
+ return ScalingParametersStatus(
+ self, refresh_fn=self.layer1.describe_scaling_parameters,
+ refresh_key=['DescribeScalingParametersResponse',
+ 'DescribeScalingParametersResult',
+ 'ScalingParameters'],
+ save_fn=self.layer1.update_scaling_parameters)
+
+ def get_access_policies(self):
+ """
+ Return a :class:`boto.cloudsearch2.option.ServicePoliciesStatus`
+ object representing the currently defined access policies for the
+ domain.
+ :return: ServicePoliciesStatus object
+ :rtype: :class:`boto.cloudsearch2.option.ServicePoliciesStatus` object
+ """
+ return ServicePoliciesStatus(
+ self, refresh_fn=self.layer1.describe_service_access_policies,
+ refresh_key=['DescribeServiceAccessPoliciesResponse',
+ 'DescribeServiceAccessPoliciesResult',
+ 'AccessPolicies'],
+ save_fn=self.layer1.update_service_access_policies)
+
+ def index_documents(self):
+ """
+ Tells the search domain to start indexing its documents using
+ the latest text processing options and IndexFields. This
+ operation must be invoked to make options whose OptionStatus
+ has OptionState of RequiresIndexDocuments visible in search
+ results.
+ """
+ self.layer1.index_documents(self.name)
+
+ def get_index_fields(self, field_names=None):
+ """
+ Return a list of index fields defined for this domain.
+ :return: list of IndexFieldStatus objects
+ :rtype: list of :class:`boto.cloudsearch2.option.IndexFieldStatus`
+ object
+ """
+ data = self.layer1.describe_index_fields(self.name, field_names)
+
+ data = (data['DescribeIndexFieldsResponse']
+ ['DescribeIndexFieldsResult']
+ ['IndexFields'])
+
+ return [IndexFieldStatus(self, d) for d in data]
+
+ def create_index_field(self, field_name, field_type,
+ default='', facet=False, returnable=False,
+ searchable=False, sortable=False,
+ highlight=False, source_field=None,
+ analysis_scheme=None):
+ """
+ Defines an ``IndexField``, either replacing an existing
+ definition or creating a new one.
+
+ :type field_name: string
+ :param field_name: The name of a field in the search index.
+
+ :type field_type: string
+ :param field_type: The type of field. Valid values are
+ int | double | literal | text | date | latlon |
+ int-array | double-array | literal-array | text-array | date-array
+
+ :type default: string or int
+ :param default: The default value for the field. If the
+ field is of type ``int`` this should be an integer value.
+ Otherwise, it's a string.
+
+ :type facet: bool
+ :param facet: A boolean to indicate whether facets
+ are enabled for this field or not. Does not apply to
+ fields of type ``int, int-array, text, text-array``.
+
+ :type returnable: bool
+ :param returnable: A boolean to indicate whether values
+ of this field can be returned in search results or
+ used in ranking.
+
+ :type searchable: bool
+ :param searchable: A boolean to indicate whether search
+ is enabled for this field or not.
+
+ :type sortable: bool
+ :param sortable: A boolean to indicate whether sorting
+ is enabled for this field or not. Does not apply to
+ fields of array types.
+
+ :type highlight: bool
+ :param highlight: A boolean to indicate whether highlighting
+ is enabled for this field or not. Does not apply to
+ fields of type ``double, int, date, latlon``
+
+ :type source_field: list of strings or string
+ :param source_field: For array types, this is the list of fields
+ to treat as the source. For singular types, pass a string only.
+
+ :type analysis_scheme: string
+ :param analysis_scheme: The analysis scheme to use for this field.
+ Only applies to ``text | text-array`` field types
+
+ :return: IndexFieldStatus objects
+ :rtype: :class:`boto.cloudsearch2.option.IndexFieldStatus` object
+
+ :raises: BaseException, InternalException, LimitExceededException,
+ InvalidTypeException, ResourceNotFoundException
+ """
+ index = {
+ 'IndexFieldName': field_name,
+ 'IndexFieldType': field_type
+ }
+ if field_type == 'literal':
+ index['LiteralOptions'] = {
+ 'FacetEnabled': facet,
+ 'ReturnEnabled': returnable,
+ 'SearchEnabled': searchable,
+ 'SortEnabled': sortable
+ }
+ if default:
+ index['LiteralOptions']['DefaultValue'] = default
+ if source_field:
+ index['LiteralOptions']['SourceField'] = source_field
+ elif field_type == 'literal-array':
+ index['LiteralArrayOptions'] = {
+ 'FacetEnabled': facet,
+ 'ReturnEnabled': returnable,
+ 'SearchEnabled': searchable
+ }
+ if default:
+ index['LiteralArrayOptions']['DefaultValue'] = default
+ if source_field:
+ index['LiteralArrayOptions']['SourceFields'] = \
+ ','.join(source_field)
+ elif field_type == 'int':
+ index['IntOptions'] = {
+ 'DefaultValue': default,
+ 'FacetEnabled': facet,
+ 'ReturnEnabled': returnable,
+ 'SearchEnabled': searchable,
+ 'SortEnabled': sortable
+ }
+ if default:
+ index['IntOptions']['DefaultValue'] = default
+ if source_field:
+ index['IntOptions']['SourceField'] = source_field
+ elif field_type == 'int-array':
+ index['IntArrayOptions'] = {
+ 'FacetEnabled': facet,
+ 'ReturnEnabled': returnable,
+ 'SearchEnabled': searchable
+ }
+ if default:
+ index['IntArrayOptions']['DefaultValue'] = default
+ if source_field:
+ index['IntArrayOptions']['SourceFields'] = \
+ ','.join(source_field)
+ elif field_type == 'date':
+ index['DateOptions'] = {
+ 'FacetEnabled': facet,
+ 'ReturnEnabled': returnable,
+ 'SearchEnabled': searchable,
+ 'SortEnabled': sortable
+ }
+ if default:
+ index['DateOptions']['DefaultValue'] = default
+ if source_field:
+ index['DateOptions']['SourceField'] = source_field
+ elif field_type == 'date-array':
+ index['DateArrayOptions'] = {
+ 'FacetEnabled': facet,
+ 'ReturnEnabled': returnable,
+ 'SearchEnabled': searchable
+ }
+ if default:
+ index['DateArrayOptions']['DefaultValue'] = default
+ if source_field:
+ index['DateArrayOptions']['SourceFields'] = \
+ ','.join(source_field)
+ elif field_type == 'double':
+ index['DoubleOptions'] = {
+ 'FacetEnabled': facet,
+ 'ReturnEnabled': returnable,
+ 'SearchEnabled': searchable,
+ 'SortEnabled': sortable
+ }
+ if default:
+ index['DoubleOptions']['DefaultValue'] = default
+ if source_field:
+ index['DoubleOptions']['SourceField'] = source_field
+ elif field_type == 'double-array':
+ index['DoubleArrayOptions'] = {
+ 'FacetEnabled': facet,
+ 'ReturnEnabled': returnable,
+ 'SearchEnabled': searchable
+ }
+ if default:
+ index['DoubleArrayOptions']['DefaultValue'] = default
+ if source_field:
+ index['DoubleArrayOptions']['SourceFields'] = \
+ ','.join(source_field)
+ elif field_type == 'text':
+ index['TextOptions'] = {
+ 'ReturnEnabled': returnable,
+ 'HighlightEnabled': highlight,
+ 'SortEnabled': sortable
+ }
+ if default:
+ index['TextOptions']['DefaultValue'] = default
+ if source_field:
+ index['TextOptions']['SourceField'] = source_field
+ if analysis_scheme:
+ index['TextOptions']['AnalysisScheme'] = analysis_scheme
+ elif field_type == 'text-array':
+ index['TextArrayOptions'] = {
+ 'ReturnEnabled': returnable,
+ 'HighlightEnabled': highlight
+ }
+ if default:
+ index['TextArrayOptions']['DefaultValue'] = default
+ if source_field:
+ index['TextArrayOptions']['SourceFields'] = \
+ ','.join(source_field)
+ if analysis_scheme:
+ index['TextArrayOptions']['AnalysisScheme'] = analysis_scheme
+ elif field_type == 'latlon':
+ index['LatLonOptions'] = {
+ 'FacetEnabled': facet,
+ 'ReturnEnabled': returnable,
+ 'SearchEnabled': searchable,
+ 'SortEnabled': sortable
+ }
+ if default:
+ index['LatLonOptions']['DefaultValue'] = default
+ if source_field:
+ index['LatLonOptions']['SourceField'] = source_field
+
+ data = self.layer1.define_index_field(self.name, index)
+
+ data = (data['DefineIndexFieldResponse']
+ ['DefineIndexFieldResult']
+ ['IndexField'])
+
+ return IndexFieldStatus(self, data,
+ self.layer1.describe_index_fields)
+
+ def get_expressions(self, names=None):
+ """
+ Return a list of rank expressions defined for this domain.
+ :return: list of ExpressionStatus objects
+ :rtype: list of :class:`boto.cloudsearch2.option.ExpressionStatus`
+ object
+ """
+ fn = self.layer1.describe_expressions
+ data = fn(self.name, names)
+
+ data = (data['DescribeExpressionsResponse']
+ ['DescribeExpressionsResult']
+ ['Expressions'])
+
+ return [ExpressionStatus(self, d, fn) for d in data]
+
+ def create_expression(self, name, value):
+ """
+ Create a new expression.
+
+ :type name: string
+ :param name: The name of an expression for processing
+ during a search request.
+
+ :type value: string
+ :param value: The expression to evaluate for ranking
+ or thresholding while processing a search request. The
+ Expression syntax is based on JavaScript expressions
+ and supports:
+
+ * Single value, sort enabled numeric fields (int, double, date)
+ * Other expressions
+ * The _score variable, which references a document's relevance
+ score
+ * The _time variable, which references the current epoch time
+ * Integer, floating point, hex, and octal literals
+ * Arithmetic operators: + - * / %
+ * Bitwise operators: | & ^ ~ << >> >>>
+ * Boolean operators (including the ternary operator): && || ! ?:
+ * Comparison operators: < <= == >= >
+ * Mathematical functions: abs ceil exp floor ln log2 log10 logn
+ max min pow sqrt pow
+ * Trigonometric functions: acos acosh asin asinh atan atan2 atanh
+ cos cosh sin sinh tanh tan
+ * The haversin distance function
+
+ Expressions always return an integer value from 0 to the maximum
+ 64-bit signed integer value (2^63 - 1). Intermediate results are
+ calculated as double-precision floating point values and the return
+ value is rounded to the nearest integer. If the expression is
+ invalid or evaluates to a negative value, it returns 0. If the
+ expression evaluates to a value greater than the maximum, it
+ returns the maximum value.
+
+ The source data for an Expression can be the name of an
+ IndexField of type int or double, another Expression or the
+ reserved name _score. The _score source is
+ defined to return as a double from 0 to 10.0 (inclusive) to
+ indicate how relevant a document is to the search request,
+ taking into account repetition of search terms in the
+ document and proximity of search terms to each other in
+ each matching IndexField in the document.
+
+ For more information about using rank expressions to
+ customize ranking, see the Amazon CloudSearch Developer
+ Guide.
+
+ :return: ExpressionStatus object
+ :rtype: :class:`boto.cloudsearch2.option.ExpressionStatus` object
+
+ :raises: BaseException, InternalException, LimitExceededException,
+ InvalidTypeException, ResourceNotFoundException
+ """
+ data = self.layer1.define_expression(self.name, name, value)
+
+ data = (data['DefineExpressionResponse']
+ ['DefineExpressionResult']
+ ['Expression'])
+
+ return ExpressionStatus(self, data,
+ self.layer1.describe_expressions)
+
+ def get_document_service(self):
+ return DocumentServiceConnection(domain=self)
+
+ def get_search_service(self):
+ return SearchConnection(domain=self)
+
+ def __repr__(self):
+ return '<Domain: %s>' % self.domain_name
diff --git a/awx/lib/site-packages/boto/cloudsearch2/exceptions.py b/awx/lib/site-packages/boto/cloudsearch2/exceptions.py
new file mode 100644
index 0000000000..c114113963
--- /dev/null
+++ b/awx/lib/site-packages/boto/cloudsearch2/exceptions.py
@@ -0,0 +1,46 @@
+"""
+Exceptions that are specific to the cloudsearch2 module.
+"""
+from boto.exception import BotoServerError
+
+
+class InvalidTypeException(BotoServerError):
+ """
+ Raised when an invalid record type is passed to CloudSearch.
+ """
+ pass
+
+
+class LimitExceededException(BotoServerError):
+ """
+ Raised when a limit has been exceeded.
+ """
+ pass
+
+
+class InternalException(BotoServerError):
+ """
+ A generic server-side error.
+ """
+ pass
+
+
+class DisabledOperationException(BotoServerError):
+ """
+ Raised when an operation has been disabled.
+ """
+ pass
+
+
+class ResourceNotFoundException(BotoServerError):
+ """
+ Raised when a requested resource does not exist.
+ """
+ pass
+
+
+class BaseException(BotoServerError):
+ """
+ A generic server-side error.
+ """
+ pass
diff --git a/awx/lib/site-packages/boto/cloudsearch2/layer1.py b/awx/lib/site-packages/boto/cloudsearch2/layer1.py
new file mode 100644
index 0000000000..fdc9d4c625
--- /dev/null
+++ b/awx/lib/site-packages/boto/cloudsearch2/layer1.py
@@ -0,0 +1,779 @@
+# Copyright (c) 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+
+import boto
+from boto.connection import AWSQueryConnection
+from boto.regioninfo import RegionInfo
+from boto.exception import JSONResponseError
+from boto.cloudsearch2 import exceptions
+from boto.compat import json
+
+
+class CloudSearchConnection(AWSQueryConnection):
+ """
+ Amazon CloudSearch Configuration Service
+ You use the Amazon CloudSearch configuration service to create,
+ configure, and manage search domains. Configuration service
+ requests are submitted using the AWS Query protocol. AWS Query
+ requests are HTTP or HTTPS requests submitted via HTTP GET or POST
+ with a query parameter named Action.
+
+ The endpoint for configuration service requests is region-
+ specific: cloudsearch. region .amazonaws.com. For example,
+ cloudsearch.us-east-1.amazonaws.com. For a current list of
+ supported regions and endpoints, see `Regions and Endpoints`_.
+ """
+ APIVersion = "2013-01-01"
+ DefaultRegionName = "us-east-1"
+ DefaultRegionEndpoint = "cloudsearch.us-east-1.amazonaws.com"
+ ResponseError = JSONResponseError
+
+ _faults = {
+ "InvalidTypeException": exceptions.InvalidTypeException,
+ "LimitExceededException": exceptions.LimitExceededException,
+ "InternalException": exceptions.InternalException,
+ "DisabledOperationException": exceptions.DisabledOperationException,
+ "ResourceNotFoundException": exceptions.ResourceNotFoundException,
+ "BaseException": exceptions.BaseException,
+ }
+
+ def __init__(self, **kwargs):
+ region = kwargs.pop('region', None)
+ if not region:
+ region = RegionInfo(self, self.DefaultRegionName,
+ self.DefaultRegionEndpoint)
+
+ if 'host' not in kwargs or kwargs['host'] is None:
+ kwargs['host'] = region.endpoint
+
+ super(CloudSearchConnection, self).__init__(**kwargs)
+ self.region = region
+
+ def _required_auth_capability(self):
+ return ['hmac-v4']
+
+ def build_suggesters(self, domain_name):
+ """
+ Indexes the search suggestions.
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a domain.
+ Domain names are unique across the domains owned by an account
+ within an AWS region. Domain names start with a letter or number
+ and can contain the following characters: a-z (lowercase), 0-9, and
+ - (hyphen).
+
+ """
+ params = {'DomainName': domain_name, }
+ return self._make_request(
+ action='BuildSuggesters',
+ verb='POST',
+ path='/', params=params)
+
+ def create_domain(self, domain_name):
+ """
+ Creates a new search domain. For more information, see
+ `Creating a Search Domain`_ in the Amazon CloudSearch
+ Developer Guide .
+
+ :type domain_name: string
+ :param domain_name: A name for the domain you are creating. Allowed
+ characters are a-z (lower-case letters), 0-9, and hyphen (-).
+ Domain names must start with a letter or number and be at least 3
+ and no more than 28 characters long.
+
+ """
+ params = {'DomainName': domain_name, }
+ return self._make_request(
+ action='CreateDomain',
+ verb='POST',
+ path='/', params=params)
+
+ def define_analysis_scheme(self, domain_name, analysis_scheme):
+ """
+ Configures an analysis scheme for a domain. An analysis scheme
+ defines language-specific text processing options for a `text`
+ field. For more information, see `Configuring Analysis
+ Schemes`_ in the Amazon CloudSearch Developer Guide .
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a domain.
+ Domain names are unique across the domains owned by an account
+ within an AWS region. Domain names start with a letter or number
+ and can contain the following characters: a-z (lowercase), 0-9, and
+ - (hyphen).
+
+ :type analysis_scheme: dict
+ :param analysis_scheme: Configuration information for an analysis
+ scheme. Each analysis scheme has a unique name and specifies the
+ language of the text to be processed. The following options can be
+ configured for an analysis scheme: `Synonyms`, `Stopwords`,
+ `StemmingDictionary`, and `AlgorithmicStemming`.
+
+ """
+ params = {'DomainName': domain_name, }
+ self.build_complex_param(params, 'AnalysisScheme',
+ analysis_scheme)
+ return self._make_request(
+ action='DefineAnalysisScheme',
+ verb='POST',
+ path='/', params=params)
+
+ def define_expression(self, domain_name, expression):
+ """
+ Configures an `Expression` for the search domain. Used to
+ create new expressions and modify existing ones. If the
+ expression exists, the new configuration replaces the old one.
+ For more information, see `Configuring Expressions`_ in the
+ Amazon CloudSearch Developer Guide .
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a domain.
+ Domain names are unique across the domains owned by an account
+ within an AWS region. Domain names start with a letter or number
+ and can contain the following characters: a-z (lowercase), 0-9, and
+ - (hyphen).
+
+ :type expression: dict
+ :param expression: A named expression that can be evaluated at search
+ time. Can be used for sorting and filtering search results and
+ constructing other expressions.
+
+ """
+ params = {'DomainName': domain_name, }
+ self.build_complex_param(params, 'Expression',
+ expression)
+ return self._make_request(
+ action='DefineExpression',
+ verb='POST',
+ path='/', params=params)
+
+ def define_index_field(self, domain_name, index_field):
+ """
+ Configures an `IndexField` for the search domain. Used to
+ create new fields and modify existing ones. You must specify
+ the name of the domain you are configuring and an index field
+ configuration. The index field configuration specifies a
+ unique name, the index field type, and the options you want to
+ configure for the field. The options you can specify depend on
+ the `IndexFieldType`. If the field exists, the new
+ configuration replaces the old one. For more information, see
+ `Configuring Index Fields`_ in the Amazon CloudSearch
+ Developer Guide .
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a domain.
+ Domain names are unique across the domains owned by an account
+ within an AWS region. Domain names start with a letter or number
+ and can contain the following characters: a-z (lowercase), 0-9, and
+ - (hyphen).
+
+ :type index_field: dict
+ :param index_field: The index field and field options you want to
+ configure.
+
+ """
+ params = {'DomainName': domain_name, }
+ self.build_complex_param(params, 'IndexField',
+ index_field)
+ return self._make_request(
+ action='DefineIndexField',
+ verb='POST',
+ path='/', params=params)
+
+ def define_suggester(self, domain_name, suggester):
+ """
+ Configures a suggester for a domain. A suggester enables you
+ to display possible matches before users finish typing their
+ queries. When you configure a suggester, you must specify the
+ name of the text field you want to search for possible matches
+ and a unique name for the suggester. For more information, see
+ `Getting Search Suggestions`_ in the Amazon CloudSearch
+ Developer Guide .
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a domain.
+ Domain names are unique across the domains owned by an account
+ within an AWS region. Domain names start with a letter or number
+ and can contain the following characters: a-z (lowercase), 0-9, and
+ - (hyphen).
+
+ :type suggester: dict
+ :param suggester: Configuration information for a search suggester.
+ Each suggester has a unique name and specifies the text field you
+ want to use for suggestions. The following options can be
+ configured for a suggester: `FuzzyMatching`, `SortExpression`.
+
+ """
+ params = {'DomainName': domain_name, }
+ self.build_complex_param(params, 'Suggester',
+ suggester)
+ return self._make_request(
+ action='DefineSuggester',
+ verb='POST',
+ path='/', params=params)
+
+ def delete_analysis_scheme(self, domain_name, analysis_scheme_name):
+ """
+ Deletes an analysis scheme. For more information, see
+ `Configuring Analysis Schemes`_ in the Amazon CloudSearch
+ Developer Guide .
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a domain.
+ Domain names are unique across the domains owned by an account
+ within an AWS region. Domain names start with a letter or number
+ and can contain the following characters: a-z (lowercase), 0-9, and
+ - (hyphen).
+
+ :type analysis_scheme_name: string
+ :param analysis_scheme_name: The name of the analysis scheme you want
+ to delete.
+
+ """
+ params = {
+ 'DomainName': domain_name,
+ 'AnalysisSchemeName': analysis_scheme_name,
+ }
+ return self._make_request(
+ action='DeleteAnalysisScheme',
+ verb='POST',
+ path='/', params=params)
+
+ def delete_domain(self, domain_name):
+ """
+ Permanently deletes a search domain and all of its data. Once
+ a domain has been deleted, it cannot be recovered. For more
+ information, see `Deleting a Search Domain`_ in the Amazon
+ CloudSearch Developer Guide .
+
+ :type domain_name: string
+ :param domain_name: The name of the domain you want to permanently
+ delete.
+
+ """
+ params = {'DomainName': domain_name, }
+ return self._make_request(
+ action='DeleteDomain',
+ verb='POST',
+ path='/', params=params)
+
+ def delete_expression(self, domain_name, expression_name):
+ """
+ Removes an `Expression` from the search domain. For more
+ information, see `Configuring Expressions`_ in the Amazon
+ CloudSearch Developer Guide .
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a domain.
+ Domain names are unique across the domains owned by an account
+ within an AWS region. Domain names start with a letter or number
+ and can contain the following characters: a-z (lowercase), 0-9, and
+ - (hyphen).
+
+ :type expression_name: string
+ :param expression_name: The name of the `Expression` to delete.
+
+ """
+ params = {
+ 'DomainName': domain_name,
+ 'ExpressionName': expression_name,
+ }
+ return self._make_request(
+ action='DeleteExpression',
+ verb='POST',
+ path='/', params=params)
+
+ def delete_index_field(self, domain_name, index_field_name):
+ """
+ Removes an `IndexField` from the search domain. For more
+ information, see `Configuring Index Fields`_ in the Amazon
+ CloudSearch Developer Guide .
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a domain.
+ Domain names are unique across the domains owned by an account
+ within an AWS region. Domain names start with a letter or number
+ and can contain the following characters: a-z (lowercase), 0-9, and
+ - (hyphen).
+
+ :type index_field_name: string
+ :param index_field_name: The name of the index field your want to
+ remove from the domain's indexing options.
+
+ """
+ params = {
+ 'DomainName': domain_name,
+ 'IndexFieldName': index_field_name,
+ }
+ return self._make_request(
+ action='DeleteIndexField',
+ verb='POST',
+ path='/', params=params)
+
+ def delete_suggester(self, domain_name, suggester_name):
+ """
+ Deletes a suggester. For more information, see `Getting Search
+ Suggestions`_ in the Amazon CloudSearch Developer Guide .
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a domain.
+ Domain names are unique across the domains owned by an account
+ within an AWS region. Domain names start with a letter or number
+ and can contain the following characters: a-z (lowercase), 0-9, and
+ - (hyphen).
+
+ :type suggester_name: string
+ :param suggester_name: Specifies the name of the suggester you want to
+ delete.
+
+ """
+ params = {
+ 'DomainName': domain_name,
+ 'SuggesterName': suggester_name,
+ }
+ return self._make_request(
+ action='DeleteSuggester',
+ verb='POST',
+ path='/', params=params)
+
+ def describe_analysis_schemes(self, domain_name,
+ analysis_scheme_names=None, deployed=None):
+ """
+ Gets the analysis schemes configured for a domain. An analysis
+ scheme defines language-specific text processing options for a
+ `text` field. Can be limited to specific analysis schemes by
+ name. By default, shows all analysis schemes and includes any
+ pending changes to the configuration. Set the `Deployed`
+ option to `True` to show the active configuration and exclude
+ pending changes. For more information, see `Configuring
+ Analysis Schemes`_ in the Amazon CloudSearch Developer Guide .
+
+ :type domain_name: string
+ :param domain_name: The name of the domain you want to describe.
+
+ :type analysis_scheme_names: list
+ :param analysis_scheme_names: The analysis schemes you want to
+ describe.
+
+ :type deployed: boolean
+ :param deployed: Whether to display the deployed configuration (
+ `True`) or include any pending changes ( `False`). Defaults to
+ `False`.
+
+ """
+ params = {'DomainName': domain_name, }
+ if analysis_scheme_names is not None:
+ self.build_list_params(params,
+ analysis_scheme_names,
+ 'AnalysisSchemeNames.member')
+ if deployed is not None:
+ params['Deployed'] = str(
+ deployed).lower()
+ return self._make_request(
+ action='DescribeAnalysisSchemes',
+ verb='POST',
+ path='/', params=params)
+
+ def describe_availability_options(self, domain_name, deployed=None):
+ """
+ Gets the availability options configured for a domain. By
+ default, shows the configuration with any pending changes. Set
+ the `Deployed` option to `True` to show the active
+ configuration and exclude pending changes. For more
+ information, see `Configuring Availability Options`_ in the
+ Amazon CloudSearch Developer Guide .
+
+ :type domain_name: string
+ :param domain_name: The name of the domain you want to describe.
+
+ :type deployed: boolean
+ :param deployed: Whether to display the deployed configuration (
+ `True`) or include any pending changes ( `False`). Defaults to
+ `False`.
+
+ """
+ params = {'DomainName': domain_name, }
+ if deployed is not None:
+ params['Deployed'] = str(
+ deployed).lower()
+ return self._make_request(
+ action='DescribeAvailabilityOptions',
+ verb='POST',
+ path='/', params=params)
+
+ def describe_domains(self, domain_names=None):
+ """
+ Gets information about the search domains owned by this
+ account. Can be limited to specific domains. Shows all domains
+ by default. For more information, see `Getting Information
+ about a Search Domain`_ in the Amazon CloudSearch Developer
+ Guide .
+
+ :type domain_names: list
+ :param domain_names: The names of the domains you want to include in
+ the response.
+
+ """
+ params = {}
+ if domain_names is not None:
+ self.build_list_params(params,
+ domain_names,
+ 'DomainNames.member')
+ return self._make_request(
+ action='DescribeDomains',
+ verb='POST',
+ path='/', params=params)
+
+ def describe_expressions(self, domain_name, expression_names=None,
+ deployed=None):
+ """
+ Gets the expressions configured for the search domain. Can be
+ limited to specific expressions by name. By default, shows all
+ expressions and includes any pending changes to the
+ configuration. Set the `Deployed` option to `True` to show the
+ active configuration and exclude pending changes. For more
+ information, see `Configuring Expressions`_ in the Amazon
+ CloudSearch Developer Guide .
+
+ :type domain_name: string
+ :param domain_name: The name of the domain you want to describe.
+
+ :type expression_names: list
+ :param expression_names: Limits the `DescribeExpressions` response to
+ the specified expressions. If not specified, all expressions are
+ shown.
+
+ :type deployed: boolean
+ :param deployed: Whether to display the deployed configuration (
+ `True`) or include any pending changes ( `False`). Defaults to
+ `False`.
+
+ """
+ params = {'DomainName': domain_name, }
+ if expression_names is not None:
+ self.build_list_params(params,
+ expression_names,
+ 'ExpressionNames.member')
+ if deployed is not None:
+ params['Deployed'] = str(
+ deployed).lower()
+ return self._make_request(
+ action='DescribeExpressions',
+ verb='POST',
+ path='/', params=params)
+
+ def describe_index_fields(self, domain_name, field_names=None,
+ deployed=None):
+ """
+ Gets information about the index fields configured for the
+ search domain. Can be limited to specific fields by name. By
+ default, shows all fields and includes any pending changes to
+ the configuration. Set the `Deployed` option to `True` to show
+ the active configuration and exclude pending changes. For more
+ information, see `Getting Domain Information`_ in the Amazon
+ CloudSearch Developer Guide .
+
+ :type domain_name: string
+ :param domain_name: The name of the domain you want to describe.
+
+ :type field_names: list
+ :param field_names: A list of the index fields you want to describe. If
+ not specified, information is returned for all configured index
+ fields.
+
+ :type deployed: boolean
+ :param deployed: Whether to display the deployed configuration (
+ `True`) or include any pending changes ( `False`). Defaults to
+ `False`.
+
+ """
+ params = {'DomainName': domain_name, }
+ if field_names is not None:
+ self.build_list_params(params,
+ field_names,
+ 'FieldNames.member')
+ if deployed is not None:
+ params['Deployed'] = str(
+ deployed).lower()
+ return self._make_request(
+ action='DescribeIndexFields',
+ verb='POST',
+ path='/', params=params)
+
+ def describe_scaling_parameters(self, domain_name):
+ """
+ Gets the scaling parameters configured for a domain. A
+ domain's scaling parameters specify the desired search
+ instance type and replication count. For more information, see
+ `Configuring Scaling Options`_ in the Amazon CloudSearch
+ Developer Guide .
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a domain.
+ Domain names are unique across the domains owned by an account
+ within an AWS region. Domain names start with a letter or number
+ and can contain the following characters: a-z (lowercase), 0-9, and
+ - (hyphen).
+
+ """
+ params = {'DomainName': domain_name, }
+ return self._make_request(
+ action='DescribeScalingParameters',
+ verb='POST',
+ path='/', params=params)
+
+ def describe_service_access_policies(self, domain_name, deployed=None):
+ """
+ Gets information about the access policies that control access
+ to the domain's document and search endpoints. By default,
+ shows the configuration with any pending changes. Set the
+ `Deployed` option to `True` to show the active configuration
+ and exclude pending changes. For more information, see
+ `Configuring Access for a Search Domain`_ in the Amazon
+ CloudSearch Developer Guide .
+
+ :type domain_name: string
+ :param domain_name: The name of the domain you want to describe.
+
+ :type deployed: boolean
+ :param deployed: Whether to display the deployed configuration (
+ `True`) or include any pending changes ( `False`). Defaults to
+ `False`.
+
+ """
+ params = {'DomainName': domain_name, }
+ if deployed is not None:
+ params['Deployed'] = str(
+ deployed).lower()
+ return self._make_request(
+ action='DescribeServiceAccessPolicies',
+ verb='POST',
+ path='/', params=params)
+
+ def describe_suggesters(self, domain_name, suggester_names=None,
+ deployed=None):
+ """
+ Gets the suggesters configured for a domain. A suggester
+ enables you to display possible matches before users finish
+ typing their queries. Can be limited to specific suggesters by
+ name. By default, shows all suggesters and includes any
+ pending changes to the configuration. Set the `Deployed`
+ option to `True` to show the active configuration and exclude
+ pending changes. For more information, see `Getting Search
+ Suggestions`_ in the Amazon CloudSearch Developer Guide .
+
+ :type domain_name: string
+ :param domain_name: The name of the domain you want to describe.
+
+ :type suggester_names: list
+ :param suggester_names: The suggesters you want to describe.
+
+ :type deployed: boolean
+ :param deployed: Whether to display the deployed configuration (
+ `True`) or include any pending changes ( `False`). Defaults to
+ `False`.
+
+ """
+ params = {'DomainName': domain_name, }
+ if suggester_names is not None:
+ self.build_list_params(params,
+ suggester_names,
+ 'SuggesterNames.member')
+ if deployed is not None:
+ params['Deployed'] = str(
+ deployed).lower()
+ return self._make_request(
+ action='DescribeSuggesters',
+ verb='POST',
+ path='/', params=params)
+
+ def index_documents(self, domain_name):
+ """
+ Tells the search domain to start indexing its documents using
+ the latest indexing options. This operation must be invoked to
+ activate options whose OptionStatus is
+ `RequiresIndexDocuments`.
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a domain.
+ Domain names are unique across the domains owned by an account
+ within an AWS region. Domain names start with a letter or number
+ and can contain the following characters: a-z (lowercase), 0-9, and
+ - (hyphen).
+
+ """
+ params = {'DomainName': domain_name, }
+ return self._make_request(
+ action='IndexDocuments',
+ verb='POST',
+ path='/', params=params)
+
+ def list_domain_names(self):
+ """
+ Lists all search domains owned by an account.
+
+
+ """
+ params = {}
+ return self._make_request(
+ action='ListDomainNames',
+ verb='POST',
+ path='/', params=params)
+
+ def update_availability_options(self, domain_name, multi_az):
+ """
+ Configures the availability options for a domain. Enabling the
+ Multi-AZ option expands an Amazon CloudSearch domain to an
+ additional Availability Zone in the same Region to increase
+ fault tolerance in the event of a service disruption. Changes
+ to the Multi-AZ option can take about half an hour to become
+ active. For more information, see `Configuring Availability
+ Options`_ in the Amazon CloudSearch Developer Guide .
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a domain.
+ Domain names are unique across the domains owned by an account
+ within an AWS region. Domain names start with a letter or number
+ and can contain the following characters: a-z (lowercase), 0-9, and
+ - (hyphen).
+
+ :type multi_az: boolean
+ :param multi_az: You expand an existing search domain to a second
+ Availability Zone by setting the Multi-AZ option to true.
+ Similarly, you can turn off the Multi-AZ option to downgrade the
+ domain to a single Availability Zone by setting the Multi-AZ option
+ to `False`.
+
+ """
+ params = {'DomainName': domain_name, 'MultiAZ': multi_az, }
+ return self._make_request(
+ action='UpdateAvailabilityOptions',
+ verb='POST',
+ path='/', params=params)
+
+ def update_scaling_parameters(self, domain_name, scaling_parameters):
+ """
+ Configures scaling parameters for a domain. A domain's scaling
+ parameters specify the desired search instance type and
+ replication count. Amazon CloudSearch will still automatically
+ scale your domain based on the volume of data and traffic, but
+ not below the desired instance type and replication count. If
+ the Multi-AZ option is enabled, these values control the
+ resources used per Availability Zone. For more information,
+ see `Configuring Scaling Options`_ in the Amazon CloudSearch
+ Developer Guide .
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a domain.
+ Domain names are unique across the domains owned by an account
+ within an AWS region. Domain names start with a letter or number
+ and can contain the following characters: a-z (lowercase), 0-9, and
+ - (hyphen).
+
+ :type scaling_parameters: dict
+ :param scaling_parameters: The desired instance type and desired number
+ of replicas of each index partition.
+
+ """
+ params = {'DomainName': domain_name, }
+ self.build_complex_param(params, 'ScalingParameters',
+ scaling_parameters)
+ return self._make_request(
+ action='UpdateScalingParameters',
+ verb='POST',
+ path='/', params=params)
+
+ def update_service_access_policies(self, domain_name, access_policies):
+ """
+ Configures the access rules that control access to the
+ domain's document and search endpoints. For more information,
+ see ` Configuring Access for an Amazon CloudSearch Domain`_.
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a domain.
+ Domain names are unique across the domains owned by an account
+ within an AWS region. Domain names start with a letter or number
+ and can contain the following characters: a-z (lowercase), 0-9, and
+ - (hyphen).
+
+ :type access_policies: string
+ :param access_policies: The access rules you want to configure. These
+ rules replace any existing rules.
+
+ """
+ params = {
+ 'DomainName': domain_name,
+ 'AccessPolicies': access_policies,
+ }
+ return self._make_request(
+ action='UpdateServiceAccessPolicies',
+ verb='POST',
+ path='/', params=params)
+
+ def build_complex_param(self, params, label, value):
+ """Serialize a structure.
+
+ For example::
+
+ param_type = 'structure'
+ label = 'IndexField'
+ value = {'IndexFieldName': 'a', 'IntOptions': {'DefaultValue': 5}}
+
+ would result in the params dict being updated with these params::
+
+ IndexField.IndexFieldName = a
+ IndexField.IntOptions.DefaultValue = 5
+
+ :type params: dict
+ :param params: The params dict. The complex list params
+ will be added to this dict.
+
+ :type label: str
+ :param label: String label for param key
+
+ :type value: any
+ :param value: The value to serialize
+ """
+ for k, v in value.items():
+ if isinstance(v, dict):
+ for k2, v2 in v.items():
+ self.build_complex_param(params, label + '.' + k, v)
+ elif isinstance(v, bool):
+ params['%s.%s' % (label, k)] = v and 'true' or 'false'
+ else:
+ params['%s.%s' % (label, k)] = v
+
+ def _make_request(self, action, verb, path, params):
+ params['ContentType'] = 'JSON'
+ response = self.make_request(action=action, verb='POST',
+ path='/', params=params)
+ body = response.read().decode('utf-8')
+ boto.log.debug(body)
+ if response.status == 200:
+ return json.loads(body)
+ else:
+ json_body = json.loads(body)
+ fault_name = json_body.get('Error', {}).get('Code', None)
+ exception_class = self._faults.get(fault_name, self.ResponseError)
+ raise exception_class(response.status, response.reason,
+ body=json_body)
diff --git a/awx/lib/site-packages/boto/cloudsearch2/layer2.py b/awx/lib/site-packages/boto/cloudsearch2/layer2.py
new file mode 100644
index 0000000000..c4840482a0
--- /dev/null
+++ b/awx/lib/site-packages/boto/cloudsearch2/layer2.py
@@ -0,0 +1,93 @@
+# Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
+# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
+# All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+
+from boto.cloudsearch2.layer1 import CloudSearchConnection
+from boto.cloudsearch2.domain import Domain
+from boto.compat import six
+
+
+class Layer2(object):
+
+ def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
+ is_secure=True, port=None, proxy=None, proxy_port=None,
+ host=None, debug=0, session_token=None, region=None,
+ validate_certs=True):
+
+ if isinstance(region, six.string_types):
+ import boto.cloudsearch2
+ for region_info in boto.cloudsearch2.regions():
+ if region_info.name == region:
+ region = region_info
+ break
+
+ self.layer1 = CloudSearchConnection(
+ aws_access_key_id=aws_access_key_id,
+ aws_secret_access_key=aws_secret_access_key,
+ is_secure=is_secure,
+ port=port,
+ proxy=proxy,
+ proxy_port=proxy_port,
+ host=host,
+ debug=debug,
+ security_token=session_token,
+ region=region,
+ validate_certs=validate_certs)
+
+ def list_domains(self, domain_names=None):
+ """
+ Return a list of objects for each domain defined in the
+ current account.
+ :rtype: list of :class:`boto.cloudsearch2.domain.Domain`
+ """
+ domain_data = self.layer1.describe_domains(domain_names)
+
+ domain_data = (domain_data['DescribeDomainsResponse']
+ ['DescribeDomainsResult']
+ ['DomainStatusList'])
+
+ return [Domain(self.layer1, data) for data in domain_data]
+
+ def create_domain(self, domain_name):
+ """
+ Create a new CloudSearch domain and return the corresponding object.
+ :return: Domain object, or None if the domain isn't found
+ :rtype: :class:`boto.cloudsearch2.domain.Domain`
+ """
+ data = self.layer1.create_domain(domain_name)
+ return Domain(self.layer1, data['CreateDomainResponse']
+ ['CreateDomainResult']
+ ['DomainStatus'])
+
+ def lookup(self, domain_name):
+ """
+ Lookup a single domain
+ :param domain_name: The name of the domain to look up
+ :type domain_name: str
+
+ :return: Domain object, or None if the domain isn't found
+ :rtype: :class:`boto.cloudsearch2.domain.Domain`
+ """
+ domains = self.list_domains(domain_names=[domain_name])
+ if len(domains) > 0:
+ return domains[0]
diff --git a/awx/lib/site-packages/boto/cloudsearch2/optionstatus.py b/awx/lib/site-packages/boto/cloudsearch2/optionstatus.py
new file mode 100644
index 0000000000..0a45bea4f0
--- /dev/null
+++ b/awx/lib/site-packages/boto/cloudsearch2/optionstatus.py
@@ -0,0 +1,233 @@
+# Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
+# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
+# All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+
+from boto.compat import json
+
+
+class OptionStatus(dict):
+ """
+ Presents a combination of status field (defined below) which are
+ accessed as attributes and option values which are stored in the
+ native Python dictionary. In this class, the option values are
+ merged from a JSON object that is stored as the Option part of
+ the object.
+
+ :ivar domain_name: The name of the domain this option is associated with.
+ :ivar create_date: A timestamp for when this option was created.
+ :ivar state: The state of processing a change to an option.
+ Possible values:
+
+ * RequiresIndexDocuments: the option's latest value will not
+ be visible in searches until IndexDocuments has been called
+ and indexing is complete.
+ * Processing: the option's latest value is not yet visible in
+ all searches but is in the process of being activated.
+ * Active: the option's latest value is completely visible.
+
+ :ivar update_date: A timestamp for when this option was updated.
+ :ivar update_version: A unique integer that indicates when this
+ option was last updated.
+ """
+
+ def __init__(self, domain, data=None, refresh_fn=None, refresh_key=None,
+ save_fn=None):
+ self.domain = domain
+ self.refresh_fn = refresh_fn
+ self.refresh_key = refresh_key
+ self.save_fn = save_fn
+ self.refresh(data)
+
+ def _update_status(self, status):
+ self.creation_date = status['CreationDate']
+ self.status = status['State']
+ self.update_date = status['UpdateDate']
+ self.update_version = int(status['UpdateVersion'])
+
+ def _update_options(self, options):
+ if options:
+ self.update(options)
+
+ def refresh(self, data=None):
+ """
+ Refresh the local state of the object. You can either pass
+ new state data in as the parameter ``data`` or, if that parameter
+ is omitted, the state data will be retrieved from CloudSearch.
+ """
+ if not data:
+ if self.refresh_fn:
+ data = self.refresh_fn(self.domain.name)
+
+ if data and self.refresh_key:
+ # Attempt to pull out the right nested bag of data
+ for key in self.refresh_key:
+ data = data[key]
+ if data:
+ self._update_status(data['Status'])
+ self._update_options(data['Options'])
+
+ def to_json(self):
+ """
+ Return the JSON representation of the options as a string.
+ """
+ return json.dumps(self)
+
+ def save(self):
+ """
+ Write the current state of the local object back to the
+ CloudSearch service.
+ """
+ if self.save_fn:
+ data = self.save_fn(self.domain.name, self.to_json())
+ self.refresh(data)
+
+
+class IndexFieldStatus(OptionStatus):
+ def save(self):
+ pass
+
+
+class AvailabilityOptionsStatus(OptionStatus):
+ def save(self):
+ pass
+
+
+class ScalingParametersStatus(IndexFieldStatus):
+ pass
+
+
+class ExpressionStatus(IndexFieldStatus):
+ pass
+
+
+class ServicePoliciesStatus(OptionStatus):
+
+ def new_statement(self, arn, ip):
+ """
+ Returns a new policy statement that will allow
+ access to the service described by ``arn`` by the
+ ip specified in ``ip``.
+
+ :type arn: string
+ :param arn: The Amazon Resource Notation identifier for the
+ service you wish to provide access to. This would be
+ either the search service or the document service.
+
+ :type ip: string
+ :param ip: An IP address or CIDR block you wish to grant access
+ to.
+ """
+ return {
+ "Effect": "Allow",
+ "Action": "*", # Docs say use GET, but denies unless *
+ "Resource": arn,
+ "Condition": {
+ "IpAddress": {
+ "aws:SourceIp": [ip]
+ }
+ }
+ }
+
+ def _allow_ip(self, arn, ip):
+ if 'Statement' not in self:
+ s = self.new_statement(arn, ip)
+ self['Statement'] = [s]
+ self.save()
+ else:
+ add_statement = True
+ for statement in self['Statement']:
+ if statement['Resource'] == arn:
+ for condition_name in statement['Condition']:
+ if condition_name == 'IpAddress':
+ add_statement = False
+ condition = statement['Condition'][condition_name]
+ if ip not in condition['aws:SourceIp']:
+ condition['aws:SourceIp'].append(ip)
+
+ if add_statement:
+ s = self.new_statement(arn, ip)
+ self['Statement'].append(s)
+ self.save()
+
+ def allow_search_ip(self, ip):
+ """
+ Add the provided ip address or CIDR block to the list of
+ allowable address for the search service.
+
+ :type ip: string
+ :param ip: An IP address or CIDR block you wish to grant access
+ to.
+ """
+ arn = self.domain.service_arn
+ self._allow_ip(arn, ip)
+
+ def allow_doc_ip(self, ip):
+ """
+ Add the provided ip address or CIDR block to the list of
+ allowable address for the document service.
+
+ :type ip: string
+ :param ip: An IP address or CIDR block you wish to grant access
+ to.
+ """
+ arn = self.domain.service_arn
+ self._allow_ip(arn, ip)
+
+ def _disallow_ip(self, arn, ip):
+ if 'Statement' not in self:
+ return
+ need_update = False
+ for statement in self['Statement']:
+ if statement['Resource'] == arn:
+ for condition_name in statement['Condition']:
+ if condition_name == 'IpAddress':
+ condition = statement['Condition'][condition_name]
+ if ip in condition['aws:SourceIp']:
+ condition['aws:SourceIp'].remove(ip)
+ need_update = True
+ if need_update:
+ self.save()
+
+ def disallow_search_ip(self, ip):
+ """
+ Remove the provided ip address or CIDR block from the list of
+ allowable address for the search service.
+
+ :type ip: string
+ :param ip: An IP address or CIDR block you wish to grant access
+ to.
+ """
+ arn = self.domain.service_arn
+ self._disallow_ip(arn, ip)
+
+ def disallow_doc_ip(self, ip):
+ """
+ Remove the provided ip address or CIDR block from the list of
+ allowable address for the document service.
+
+ :type ip: string
+ :param ip: An IP address or CIDR block you wish to grant access
+ to.
+ """
+ arn = self.domain.service_arn
+ self._disallow_ip(arn, ip)
diff --git a/awx/lib/site-packages/boto/cloudsearch2/search.py b/awx/lib/site-packages/boto/cloudsearch2/search.py
new file mode 100644
index 0000000000..78ffc168ae
--- /dev/null
+++ b/awx/lib/site-packages/boto/cloudsearch2/search.py
@@ -0,0 +1,363 @@
+# Copyright (c) 2014 Amazon.com, Inc. or its affiliates.
+# All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+from math import ceil
+from boto.compat import json, map, six
+import requests
+
+SIMPLE = 'simple'
+STRUCTURED = 'structured'
+LUCENE = 'lucene'
+DISMAX = 'dismax'
+
+
+class SearchServiceException(Exception):
+ pass
+
+
+class SearchResults(object):
+ def __init__(self, **attrs):
+ self.rid = attrs['status']['rid']
+ self.time_ms = attrs['status']['time-ms']
+ self.hits = attrs['hits']['found']
+ self.docs = attrs['hits']['hit']
+ self.start = attrs['hits']['start']
+ self.query = attrs['query']
+ self.search_service = attrs['search_service']
+
+ self.facets = {}
+ if 'facets' in attrs:
+ for (facet, values) in attrs['facets'].items():
+ if 'buckets' in values:
+ self.facets[facet] = dict((k, v) for (k, v) in map(lambda x: (x['value'], x['count']), values.get('buckets', [])))
+
+ self.num_pages_needed = ceil(self.hits / self.query.real_size)
+
+ def __len__(self):
+ return len(self.docs)
+
+ def __iter__(self):
+ return iter(self.docs)
+
+ def next_page(self):
+ """Call Cloudsearch to get the next page of search results
+
+ :rtype: :class:`boto.cloudsearch2.search.SearchResults`
+ :return: the following page of search results
+ """
+ if self.query.page <= self.num_pages_needed:
+ self.query.start += self.query.real_size
+ self.query.page += 1
+ return self.search_service(self.query)
+ else:
+ raise StopIteration
+
+
+class Query(object):
+
+ RESULTS_PER_PAGE = 500
+
+ def __init__(self, q=None, parser=None, fq=None, expr=None,
+ return_fields=None, size=10, start=0, sort=None,
+ facet=None, highlight=None, partial=None, options=None):
+
+ self.q = q
+ self.parser = parser
+ self.fq = fq
+ self.expr = expr or {}
+ self.sort = sort or []
+ self.return_fields = return_fields or []
+ self.start = start
+ self.facet = facet or {}
+ self.highlight = highlight or {}
+ self.partial = partial
+ self.options = options
+ self.page = 0
+ self.update_size(size)
+
+ def update_size(self, new_size):
+ self.size = new_size
+ self.real_size = Query.RESULTS_PER_PAGE if (self.size >
+ Query.RESULTS_PER_PAGE or self.size == 0) else self.size
+
+ def to_params(self):
+ """Transform search parameters from instance properties to a dictionary
+
+ :rtype: dict
+ :return: search parameters
+ """
+ params = {'start': self.start, 'size': self.real_size}
+
+ if self.q:
+ params['q'] = self.q
+
+ if self.parser:
+ params['q.parser'] = self.parser
+
+ if self.fq:
+ params['fq'] = self.fq
+
+ if self.expr:
+ for k, v in six.iteritems(self.expr):
+ params['expr.%s' % k] = v
+
+ if self.facet:
+ for k, v in six.iteritems(self.facet):
+ if not isinstance(v, six.string_types):
+ v = json.dumps(v)
+ params['facet.%s' % k] = v
+
+ if self.highlight:
+ for k, v in six.iteritems(self.highlight):
+ params['highlight.%s' % k] = v
+
+ if self.options:
+ params['options'] = self.options
+
+ if self.return_fields:
+ params['return'] = ','.join(self.return_fields)
+
+ if self.partial is not None:
+ params['partial'] = self.partial
+
+ if self.sort:
+ params['sort'] = ','.join(self.sort)
+
+ return params
+
+
+class SearchConnection(object):
+
+ def __init__(self, domain=None, endpoint=None):
+ self.domain = domain
+ self.endpoint = endpoint
+ self.session = requests.Session()
+
+ if not endpoint:
+ self.endpoint = domain.search_service_endpoint
+
+ def build_query(self, q=None, parser=None, fq=None, rank=None, return_fields=None,
+ size=10, start=0, facet=None, highlight=None, sort=None,
+ partial=None, options=None):
+ return Query(q=q, parser=parser, fq=fq, expr=rank, return_fields=return_fields,
+ size=size, start=start, facet=facet, highlight=highlight,
+ sort=sort, partial=partial, options=options)
+
+ def search(self, q=None, parser=None, fq=None, rank=None, return_fields=None,
+ size=10, start=0, facet=None, highlight=None, sort=None, partial=None,
+ options=None):
+ """
+ Send a query to CloudSearch
+
+ Each search query should use at least the q or bq argument to specify
+ the search parameter. The other options are used to specify the
+ criteria of the search.
+
+ :type q: string
+ :param q: A string to search the default search fields for.
+
+ :type parser: string
+ :param parser: The parser to use. 'simple', 'structured', 'lucene', 'dismax'
+
+ :type fq: string
+ :param fq: The filter query to use.
+
+ :type sort: List of strings
+ :param sort: A list of fields or rank expressions used to order the
+ search results. Order is handled by adding 'desc' or 'asc' after the field name.
+ ``['year desc', 'author asc']``
+
+ :type return_fields: List of strings
+ :param return_fields: A list of fields which should be returned by the
+ search. If this field is not specified, only IDs will be returned.
+ ``['headline']``
+
+ :type size: int
+ :param size: Number of search results to specify
+
+ :type start: int
+ :param start: Offset of the first search result to return (can be used
+ for paging)
+
+ :type facet: dict
+ :param facet: Dictionary of fields for which facets should be returned
+ The facet value is string of JSON options
+ ``{'year': '{sort:"bucket", size:3}', 'genres': '{buckets:["Action","Adventure","Sci-Fi"]}'}``
+
+ :type highlight: dict
+ :param highlight: Dictionary of fields for which highlights should be returned
+ The facet value is string of JSON options
+            ``{'genres': '{format:'text',max_phrases:2,pre_tag:'<b>',post_tag:'</b>'}'}``
+
+ :type partial: bool
+ :param partial: Should partial results from a partioned service be returned if
+ one or more index partitions are unreachable.
+
+ :type options: str
+ :param options: Options for the query parser specified in *parser*.
+ Specified as a string in JSON format.
+ ``{fields: ['title^5', 'description']}``
+
+ :rtype: :class:`boto.cloudsearch2.search.SearchResults`
+ :return: Returns the results of this search
+
+ The following examples all assume we have indexed a set of documents
+ with fields: *author*, *date*, *headline*
+
+ A simple search will look for documents whose default text search
+ fields will contain the search word exactly:
+
+ >>> search(q='Tim') # Return documents with the word Tim in them (but not Timothy)
+
+ A simple search with more keywords will return documents whose default
+ text search fields contain the search strings together or separately.
+
+ >>> search(q='Tim apple') # Will match "tim" and "apple"
+
+ More complex searches require the boolean search operator.
+
+ Wildcard searches can be used to search for any words that start with
+ the search string.
+
+ >>> search(q="'Tim*'") # Return documents with words like Tim or Timothy)
+
+ Search terms can also be combined. Allowed operators are "and", "or",
+ "not", "field", "optional", "token", "phrase", or "filter"
+
+ >>> search(q="(and 'Tim' (field author 'John Smith'))", parser='structured')
+
+ Facets allow you to show classification information about the search
+ results. For example, you can retrieve the authors who have written
+ about Tim with a max of 3
+
+ >>> search(q='Tim', facet={'Author': '{sort:"bucket", size:3}'})
+ """
+
+ query = self.build_query(q=q, parser=parser, fq=fq, rank=rank,
+ return_fields=return_fields,
+ size=size, start=start, facet=facet,
+ highlight=highlight, sort=sort,
+ partial=partial, options=options)
+ return self(query)
+
+ def __call__(self, query):
+ """Make a call to CloudSearch
+
+ :type query: :class:`boto.cloudsearch2.search.Query`
+ :param query: A group of search criteria
+
+ :rtype: :class:`boto.cloudsearch2.search.SearchResults`
+ :return: search results
+ """
+ api_version = '2013-01-01'
+ if self.domain:
+ api_version = self.domain.layer1.APIVersion
+ url = "http://%s/%s/search" % (self.endpoint, api_version)
+ params = query.to_params()
+
+ r = self.session.get(url, params=params)
+ _body = r.content.decode('utf-8')
+ try:
+ data = json.loads(_body)
+ except ValueError:
+ if r.status_code == 403:
+ msg = ''
+ import re
+                g = re.search('<html><body><h1>403 Forbidden</h1>([^<]+)<', _body)
+ try:
+ msg = ': %s' % (g.groups()[0].strip())
+ except AttributeError:
+ pass
+ raise SearchServiceException('Authentication error from Amazon%s' % msg)
+ raise SearchServiceException("Got non-json response from Amazon. %s" % _body, query)
+
+ if 'messages' in data and 'error' in data:
+ for m in data['messages']:
+ if m['severity'] == 'fatal':
+ raise SearchServiceException("Error processing search %s "
+ "=> %s" % (params, m['message']), query)
+ elif 'error' in data:
+ raise SearchServiceException("Unknown error processing search %s"
+ % json.dumps(data), query)
+
+ data['query'] = query
+ data['search_service'] = self
+
+ return SearchResults(**data)
+
+ def get_all_paged(self, query, per_page):
+ """Get a generator to iterate over all pages of search results
+
+ :type query: :class:`boto.cloudsearch2.search.Query`
+ :param query: A group of search criteria
+
+ :type per_page: int
+ :param per_page: Number of docs in each :class:`boto.cloudsearch2.search.SearchResults` object.
+
+ :rtype: generator
+ :return: Generator containing :class:`boto.cloudsearch2.search.SearchResults`
+ """
+ query.update_size(per_page)
+ page = 0
+ num_pages_needed = 0
+ while page <= num_pages_needed:
+ results = self(query)
+ num_pages_needed = results.num_pages_needed
+ yield results
+ query.start += query.real_size
+ page += 1
+
+ def get_all_hits(self, query):
+ """Get a generator to iterate over all search results
+
+ Transparently handles the results paging from Cloudsearch
+ search results so even if you have many thousands of results
+ you can iterate over all results in a reasonably efficient
+ manner.
+
+ :type query: :class:`boto.cloudsearch2.search.Query`
+ :param query: A group of search criteria
+
+ :rtype: generator
+ :return: All docs matching query
+ """
+ page = 0
+ num_pages_needed = 0
+ while page <= num_pages_needed:
+ results = self(query)
+ num_pages_needed = results.num_pages_needed
+ for doc in results:
+ yield doc
+ query.start += query.real_size
+ page += 1
+
+ def get_num_hits(self, query):
+ """Return the total number of hits for query
+
+ :type query: :class:`boto.cloudsearch2.search.Query`
+ :param query: a group of search criteria
+
+ :rtype: int
+ :return: Total number of hits for query
+ """
+ query.update_size(1)
+ return self(query).hits
diff --git a/awx/lib/site-packages/boto/cloudtrail/layer1.py b/awx/lib/site-packages/boto/cloudtrail/layer1.py
index 861c35904f..0c18fa9066 100644
--- a/awx/lib/site-packages/boto/cloudtrail/layer1.py
+++ b/awx/lib/site-packages/boto/cloudtrail/layer1.py
@@ -20,16 +20,12 @@
# IN THE SOFTWARE.
#
-try:
- import json
-except ImportError:
- import simplejson as json
-
import boto
from boto.connection import AWSQueryConnection
from boto.regioninfo import RegionInfo
from boto.exception import JSONResponseError
from boto.cloudtrail import exceptions
+from boto.compat import json
class CloudTrailConnection(AWSQueryConnection):
@@ -344,7 +340,7 @@ class CloudTrailConnection(AWSQueryConnection):
headers=headers, data=body)
response = self._mexe(http_request, sender=None,
override_num_retries=10)
- response_body = response.read()
+ response_body = response.read().decode('utf-8')
boto.log.debug(response_body)
if response.status == 200:
if response_body:
diff --git a/awx/lib/site-packages/boto/compat.py b/awx/lib/site-packages/boto/compat.py
index 44fbc3b309..a7503f013b 100644
--- a/awx/lib/site-packages/boto/compat.py
+++ b/awx/lib/site-packages/boto/compat.py
@@ -19,6 +19,7 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
+import os
# This allows boto modules to say "from boto.compat import json". This is
# preferred so that all modules don't have to repeat this idiom.
@@ -26,3 +27,41 @@ try:
import simplejson as json
except ImportError:
import json
+
+
+# Switch to use encodebytes, which deprecates encodestring in Python 3
+try:
+ from base64 import encodebytes
+except ImportError:
+ from base64 import encodestring as encodebytes
+
+
+# If running in Google App Engine there is no "user" and
+# os.path.expanduser() will fail. Attempt to detect this case and use a
+# no-op expanduser function in this case.
+try:
+ os.path.expanduser('~')
+ expanduser = os.path.expanduser
+except (AttributeError, ImportError):
+ # This is probably running on App Engine.
+ expanduser = (lambda x: x)
+
+from boto.vendored import six
+
+from boto.vendored.six import BytesIO, StringIO
+from boto.vendored.six.moves import filter, http_client, map, _thread, \
+ urllib, zip
+from boto.vendored.six.moves.queue import Queue
+from boto.vendored.six.moves.urllib.parse import parse_qs, quote, unquote, \
+ urlparse, urlsplit
+from boto.vendored.six.moves.urllib.request import urlopen
+
+if six.PY3:
+ # StandardError was removed, so use the base exception type instead
+ StandardError = Exception
+ long_type = int
+ from configparser import ConfigParser
+else:
+ StandardError = StandardError
+ long_type = long
+ from ConfigParser import SafeConfigParser as ConfigParser
diff --git a/awx/lib/site-packages/boto/connection.py b/awx/lib/site-packages/boto/connection.py
index a178d1fdf6..5fe9c1988d 100644
--- a/awx/lib/site-packages/boto/connection.py
+++ b/awx/lib/site-packages/boto/connection.py
@@ -42,32 +42,26 @@
"""
Handles basic connections to AWS
"""
-
-from __future__ import with_statement
-import base64
from datetime import datetime
import errno
-import httplib
import os
-import Queue
import random
import re
import socket
import sys
import time
-import urllib
-import urlparse
import xml.sax
import copy
-import auth
-import auth_handler
+from boto import auth
+from boto import auth_handler
import boto
import boto.utils
import boto.handler
import boto.cacerts
from boto import config, UserAgent
+from boto.compat import six, http_client, urlparse, quote, encodebytes
from boto.exception import AWSConnectionError
from boto.exception import BotoClientError
from boto.exception import BotoServerError
@@ -165,7 +159,7 @@ class HostConnectionPool(object):
def _conn_ready(self, conn):
"""
- There is a nice state diagram at the top of httplib.py. It
+ There is a nice state diagram at the top of http_client.py. It
indicates that once the response headers have been read (which
_mexe does before adding the connection to the pool), a
response is attached to the connection, and it stays there
@@ -370,11 +364,13 @@ class HTTPRequest(object):
self.headers, self.body))
def authorize(self, connection, **kwargs):
- for key in self.headers:
- val = self.headers[key]
- if isinstance(val, unicode):
- safe = '!"#$%&\'()*+,/:;<=>?@[\\]^`{|}~'
- self.headers[key] = urllib.quote_plus(val.encode('utf-8'), safe)
+ if not getattr(self, '_headers_quoted', False):
+ for key in self.headers:
+ val = self.headers[key]
+ if isinstance(val, six.text_type):
+ safe = '!"#$%&\'()*+,/:;<=>?@[\\]^`{|}~'
+ self.headers[key] = quote(val.encode('utf-8'), safe)
+ setattr(self, '_headers_quoted', True)
connection._auth_handler.add_auth(self, **kwargs)
@@ -384,20 +380,20 @@ class HTTPRequest(object):
if 'Content-Length' not in self.headers:
if 'Transfer-Encoding' not in self.headers or \
self.headers['Transfer-Encoding'] != 'chunked':
- self.headers['Content-Length'] = str(len(self.body))
+ self.headers['Content-Length'] = len(self.body)
-class HTTPResponse(httplib.HTTPResponse):
+class HTTPResponse(http_client.HTTPResponse):
def __init__(self, *args, **kwargs):
- httplib.HTTPResponse.__init__(self, *args, **kwargs)
+ http_client.HTTPResponse.__init__(self, *args, **kwargs)
self._cached_response = ''
def read(self, amt=None):
"""Read the response.
This method does not have the same behavior as
- httplib.HTTPResponse.read. Instead, if this method is called with
+ http_client.HTTPResponse.read. Instead, if this method is called with
no ``amt`` arg, then the response body will be cached. Subsequent
calls to ``read()`` with no args **will return the cached response**.
@@ -410,10 +406,10 @@ class HTTPResponse(httplib.HTTPResponse):
# will return the full body. Note that this behavior only
# happens if the amt arg is not specified.
if not self._cached_response:
- self._cached_response = httplib.HTTPResponse.read(self)
+ self._cached_response = http_client.HTTPResponse.read(self)
return self._cached_response
else:
- return httplib.HTTPResponse.read(self, amt)
+ return http_client.HTTPResponse.read(self, amt)
class AWSAuthConnection(object):
@@ -446,7 +442,7 @@ class AWSAuthConnection(object):
:type https_connection_factory: list or tuple
:param https_connection_factory: A pair of an HTTP connection
factory and the exceptions to catch. The factory should have
- a similar interface to L{httplib.HTTPSConnection}.
+ a similar interface to L{http_client.HTTPSConnection}.
:param str proxy: Address/hostname for a proxy server
@@ -505,9 +501,9 @@ class AWSAuthConnection(object):
self.port = PORTS_BY_SECURITY[is_secure]
self.handle_proxy(proxy, proxy_port, proxy_user, proxy_pass)
- # define exceptions from httplib that we want to catch and retry
- self.http_exceptions = (httplib.HTTPException, socket.error,
- socket.gaierror, httplib.BadStatusLine)
+ # define exceptions from http_client that we want to catch and retry
+ self.http_exceptions = (http_client.HTTPException, socket.error,
+ socket.gaierror, http_client.BadStatusLine)
# define subclasses of the above that are not retryable.
self.http_unretryable_exceptions = []
if HAVE_HTTPS_CONNECTION:
@@ -528,12 +524,12 @@ class AWSAuthConnection(object):
self.host = host
self.path = path
# if the value passed in for debug
- if not isinstance(debug, (int, long)):
+ if not isinstance(debug, six.integer_types):
debug = 0
self.debug = config.getint('Boto', 'debug', debug)
self.host_header = None
- # Timeout used to tell httplib how long to wait for socket timeouts.
+ # Timeout used to tell http_client how long to wait for socket timeouts.
# Default is to leave timeout unchanged, which will in turn result in
# the socket's default global timeout being used. To specify a
# timeout, set http_socket_timeout in Boto config. Regardless,
@@ -651,7 +647,7 @@ class AWSAuthConnection(object):
signature_host = self.host
else:
# This unfortunate little hack can be attributed to
- # a difference in the 2.6 version of httplib. In old
+ # a difference in the 2.6 version of http_client. In old
# versions, it would append ":443" to the hostname sent
# in the Host header and so we needed to make sure we
# did the same when calculating the V2 signature. In 2.6
@@ -693,8 +689,8 @@ class AWSAuthConnection(object):
self.proxy_pass = config.get_value('Boto', 'proxy_pass', None)
if not self.proxy_port and self.proxy:
- print "http_proxy environment variable does not specify " \
- "a port, using default"
+ print("http_proxy environment variable does not specify " \
+ "a port, using default")
self.proxy_port = self.port
self.no_proxy = os.environ.get('no_proxy', '') or os.environ.get('NO_PROXY', '')
@@ -755,7 +751,7 @@ class AWSAuthConnection(object):
host, ca_certs=self.ca_certificates_file,
**http_connection_kwargs)
else:
- connection = httplib.HTTPSConnection(host,
+ connection = http_client.HTTPSConnection(host,
**http_connection_kwargs)
else:
boto.log.debug('establishing HTTP connection: kwargs=%s' %
@@ -766,7 +762,7 @@ class AWSAuthConnection(object):
connection = self.https_connection_factory(host,
**http_connection_kwargs)
else:
- connection = httplib.HTTPConnection(host,
+ connection = http_client.HTTPConnection(host,
**http_connection_kwargs)
if self.debug > 1:
connection.set_debuglevel(self.debug)
@@ -788,13 +784,13 @@ class AWSAuthConnection(object):
host = '%s:%d' % (host, port)
else:
host = '%s:%d' % (self.host, self.port)
- sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- try:
- sock.connect((self.proxy, int(self.proxy_port)))
- if "timeout" in self.http_connection_kwargs:
- sock.settimeout(self.http_connection_kwargs["timeout"])
- except:
- raise
+ # Seems properly to use timeout for connect too
+ timeout = self.http_connection_kwargs.get("timeout")
+ if timeout is not None:
+ sock = socket.create_connection((self.proxy,
+ int(self.proxy_port)), timeout)
+ else:
+ sock = socket.create_connection((self.proxy, int(self.proxy_port)))
boto.log.debug("Proxy connection: CONNECT %s HTTP/1.0\r\n", host)
sock.sendall("CONNECT %s HTTP/1.0\r\n" % host)
sock.sendall("User-Agent: %s\r\n" % UserAgent)
@@ -807,7 +803,7 @@ class AWSAuthConnection(object):
sock.sendall("\r\n")
else:
sock.sendall("\r\n")
- resp = httplib.HTTPResponse(sock, strict=True, debuglevel=self.debug)
+ resp = http_client.HTTPResponse(sock, strict=True, debuglevel=self.debug)
resp.begin()
if resp.status != 200:
@@ -821,7 +817,7 @@ class AWSAuthConnection(object):
# We can safely close the response, it duped the original socket
resp.close()
- h = httplib.HTTPConnection(host)
+ h = http_client.HTTPConnection(host)
if self.https_validate_certificates and HAVE_HTTPS_CONNECTION:
msg = "wrapping ssl socket for proxied connection; "
@@ -843,11 +839,11 @@ class AWSAuthConnection(object):
hostname, cert, 'hostname mismatch')
else:
# Fallback for old Python without ssl.wrap_socket
- if hasattr(httplib, 'ssl'):
- sslSock = httplib.ssl.SSLSocket(sock)
+ if hasattr(http_client, 'ssl'):
+ sslSock = http_client.ssl.SSLSocket(sock)
else:
sslSock = socket.ssl(sock, None, None)
- sslSock = httplib.FakeSocket(sock, sslSock)
+ sslSock = http_client.FakeSocket(sock, sslSock)
# This is a bit unclean
h.sock = sslSock
@@ -858,7 +854,7 @@ class AWSAuthConnection(object):
return path
def get_proxy_auth_header(self):
- auth = base64.encodestring(self.proxy_user + ':' + self.proxy_pass)
+ auth = encodebytes(self.proxy_user + ':' + self.proxy_pass)
return {'Proxy-Authorization': 'Basic %s' % auth}
def set_host_header(self, request):
@@ -899,9 +895,16 @@ class AWSAuthConnection(object):
i = 0
connection = self.get_http_connection(request.host, request.port,
self.is_secure)
+
+ # Convert body to bytes if needed
+ if not isinstance(request.body, bytes) and hasattr(request.body,
+ 'encode'):
+ request.body = request.body.encode('utf-8')
+
while i <= num_retries:
# Use binary exponential backoff to desynchronize client requests.
- next_sleep = random.random() * (2 ** i)
+ next_sleep = min(random.random() * (2 ** i),
+ boto.config.get('Boto', 'max_retry_delay', 60))
try:
# we now re-sign each request before it is retried
boto.log.debug('Token: %s' % self.provider.security_token)
@@ -913,6 +916,7 @@ class AWSAuthConnection(object):
if 's3' not in self._required_auth_capability():
if not getattr(self, 'anon', False):
self.set_host_header(request)
+ boto.log.debug('Final headers: %s' % request.headers)
request.start_time = datetime.now()
if callable(sender):
response = sender(connection, request.method, request.path,
@@ -921,9 +925,10 @@ class AWSAuthConnection(object):
connection.request(request.method, request.path,
request.body, request.headers)
response = connection.getresponse()
+ boto.log.debug('Response headers: %s' % response.getheaders())
location = response.getheader('location')
# -- gross hack --
- # httplib gets confused with chunked responses to HEAD requests
+ # http_client gets confused with chunked responses to HEAD requests
# so I have to fake it out
if request.method == 'HEAD' and getattr(response,
'chunked', False):
@@ -941,6 +946,8 @@ class AWSAuthConnection(object):
msg += 'Retrying in %3.1f seconds' % next_sleep
boto.log.debug(msg)
body = response.read()
+ if isinstance(body, bytes):
+ body = body.decode('utf-8')
elif response.status < 300 or response.status >= 400 or \
not location:
# don't return connection to the pool if response contains
@@ -959,7 +966,7 @@ class AWSAuthConnection(object):
return response
else:
scheme, request.host, request.path, \
- params, query, fragment = urlparse.urlparse(location)
+ params, query, fragment = urlparse(location)
if query:
request.path += '?' + query
# urlparse can return both host and port in netloc, so if
@@ -974,12 +981,12 @@ class AWSAuthConnection(object):
scheme == 'https')
response = None
continue
- except PleaseRetryException, e:
+ except PleaseRetryException as e:
boto.log.debug('encountered a retry exception: %s' % e)
connection = self.new_http_connection(request.host, request.port,
self.is_secure)
response = e.response
- except self.http_exceptions, e:
+ except self.http_exceptions as e:
for unretryable in self.http_unretryable_exceptions:
if isinstance(e, unretryable):
boto.log.debug(
@@ -1089,7 +1096,7 @@ class AWSQueryConnection(AWSAuthConnection):
return self._mexe(http_request)
def build_list_params(self, params, items, label):
- if isinstance(items, basestring):
+ if isinstance(items, six.string_types):
items = [items]
for i in range(1, len(items) + 1):
params['%s.%d' % (label, i)] = items[i - 1]
@@ -1149,6 +1156,8 @@ class AWSQueryConnection(AWSAuthConnection):
elif response.status == 200:
rs = ResultSet(markers)
h = boto.handler.XmlHandler(rs, parent)
+ if isinstance(body, six.text_type):
+ body = body.encode('utf-8')
xml.sax.parseString(body, h)
return rs
else:
@@ -1169,6 +1178,8 @@ class AWSQueryConnection(AWSAuthConnection):
elif response.status == 200:
obj = cls(parent)
h = boto.handler.XmlHandler(obj, parent)
+ if isinstance(body, six.text_type):
+ body = body.encode('utf-8')
xml.sax.parseString(body, h)
return obj
else:
diff --git a/awx/lib/site-packages/boto/datapipeline/layer1.py b/awx/lib/site-packages/boto/datapipeline/layer1.py
index 6635f01c2d..6a3fb9b5d1 100644
--- a/awx/lib/site-packages/boto/datapipeline/layer1.py
+++ b/awx/lib/site-packages/boto/datapipeline/layer1.py
@@ -627,7 +627,7 @@ class DataPipelineConnection(AWSQueryConnection):
headers=headers, data=body)
response = self._mexe(http_request, sender=None,
override_num_retries=10)
- response_body = response.read()
+ response_body = response.read().decode('utf-8')
boto.log.debug(response_body)
if response.status == 200:
if response_body:
diff --git a/awx/lib/site-packages/boto/directconnect/layer1.py b/awx/lib/site-packages/boto/directconnect/layer1.py
index b225e18e50..08197ddfa8 100644
--- a/awx/lib/site-packages/boto/directconnect/layer1.py
+++ b/awx/lib/site-packages/boto/directconnect/layer1.py
@@ -20,16 +20,12 @@
# IN THE SOFTWARE.
#
-try:
- import json
-except ImportError:
- import simplejson as json
-
import boto
from boto.connection import AWSQueryConnection
from boto.regioninfo import RegionInfo
from boto.exception import JSONResponseError
from boto.directconnect import exceptions
+from boto.compat import json
class DirectConnectConnection(AWSQueryConnection):
@@ -619,7 +615,7 @@ class DirectConnectConnection(AWSQueryConnection):
headers=headers, data=body)
response = self._mexe(http_request, sender=None,
override_num_retries=10)
- response_body = response.read()
+ response_body = response.read().decode('utf-8')
boto.log.debug(response_body)
if response.status == 200:
if response_body:
diff --git a/awx/lib/site-packages/boto/dynamodb/batch.py b/awx/lib/site-packages/boto/dynamodb/batch.py
index 6a755a93ab..f30b8425c6 100644
--- a/awx/lib/site-packages/boto/dynamodb/batch.py
+++ b/awx/lib/site-packages/boto/dynamodb/batch.py
@@ -20,6 +20,7 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
+from boto.compat import six
class Batch(object):
@@ -176,7 +177,7 @@ class BatchList(list):
if not self.unprocessed:
return None
- for table_name, table_req in self.unprocessed.iteritems():
+ for table_name, table_req in six.iteritems(self.unprocessed):
table_keys = table_req['Keys']
table = self.layer2.get_table(table_name)
@@ -196,7 +197,6 @@ class BatchList(list):
return self.submit()
-
def submit(self):
res = self.layer2.batch_get_item(self)
if 'UnprocessedKeys' in res:
@@ -259,4 +259,3 @@ class BatchWriteList(list):
table_name, batch_dict = batch.to_dict()
d[table_name] = batch_dict
return d
-
diff --git a/awx/lib/site-packages/boto/dynamodb/item.py b/awx/lib/site-packages/boto/dynamodb/item.py
index 9dcbad0628..a47f22bf0e 100644
--- a/awx/lib/site-packages/boto/dynamodb/item.py
+++ b/awx/lib/site-packages/boto/dynamodb/item.py
@@ -35,7 +35,7 @@ class Item(dict):
:ivar range_key_name: The name of the RangeKey associated with this item.
:ivar table: The Table this item belongs to.
"""
-
+
def __init__(self, table, hash_key=None, range_key=None, attrs=None):
self.table = table
self._updates = None
diff --git a/awx/lib/site-packages/boto/dynamodb/layer1.py b/awx/lib/site-packages/boto/dynamodb/layer1.py
index 317cf43370..0984f71ab4 100644
--- a/awx/lib/site-packages/boto/dynamodb/layer1.py
+++ b/awx/lib/site-packages/boto/dynamodb/layer1.py
@@ -122,14 +122,14 @@ class Layer1(AWSAuthConnection):
boto.log.debug('RequestId: %s' % request_id)
boto.perflog.debug('%s: id=%s time=%sms',
headers['X-Amz-Target'], request_id, int(elapsed))
- response_body = response.read()
+ response_body = response.read().decode('utf-8')
boto.log.debug(response_body)
return json.loads(response_body, object_hook=object_hook)
def _retry_handler(self, response, i, next_sleep):
status = None
if response.status == 400:
- response_body = response.read()
+ response_body = response.read().decode('utf-8')
boto.log.debug(response_body)
data = json.loads(response_body)
if self.ThruputError in data.get('__type'):
@@ -160,7 +160,7 @@ class Layer1(AWSAuthConnection):
expected_crc32 = response.getheader('x-amz-crc32')
if self._validate_checksums and expected_crc32 is not None:
boto.log.debug('Validating crc32 checksum for body: %s',
- response.read())
+ response.read().decode('utf-8'))
actual_crc32 = crc32(response.read()) & 0xffffffff
expected_crc32 = int(expected_crc32)
if actual_crc32 != expected_crc32:
@@ -173,7 +173,8 @@ class Layer1(AWSAuthConnection):
if i == 0:
next_sleep = 0
else:
- next_sleep = 0.05 * (2 ** i)
+ next_sleep = min(0.05 * (2 ** i),
+ boto.config.get('Boto', 'max_retry_delay', 60))
return next_sleep
def list_tables(self, limit=None, start_table=None):
diff --git a/awx/lib/site-packages/boto/dynamodb/layer2.py b/awx/lib/site-packages/boto/dynamodb/layer2.py
index 743c7055cd..fa0e545f7d 100644
--- a/awx/lib/site-packages/boto/dynamodb/layer2.py
+++ b/awx/lib/site-packages/boto/dynamodb/layer2.py
@@ -264,13 +264,13 @@ class Layer2(object):
"""
dynamodb_key = {}
dynamodb_value = self.dynamizer.encode(hash_key)
- if dynamodb_value.keys()[0] != schema.hash_key_type:
+ if list(dynamodb_value.keys())[0] != schema.hash_key_type:
msg = 'Hashkey must be of type: %s' % schema.hash_key_type
raise TypeError(msg)
dynamodb_key['HashKeyElement'] = dynamodb_value
if range_key is not None:
dynamodb_value = self.dynamizer.encode(range_key)
- if dynamodb_value.keys()[0] != schema.range_key_type:
+ if list(dynamodb_value.keys())[0] != schema.range_key_type:
msg = 'RangeKey must be of type: %s' % schema.range_key_type
raise TypeError(msg)
dynamodb_key['RangeKeyElement'] = dynamodb_value
diff --git a/awx/lib/site-packages/boto/dynamodb/table.py b/awx/lib/site-packages/boto/dynamodb/table.py
index 129b079503..152b95d908 100644
--- a/awx/lib/site-packages/boto/dynamodb/table.py
+++ b/awx/lib/site-packages/boto/dynamodb/table.py
@@ -47,9 +47,9 @@ class TableBatchGenerator(object):
self.consistent_read = consistent_read
def _queue_unprocessed(self, res):
- if not u'UnprocessedKeys' in res:
+ if u'UnprocessedKeys' not in res:
return
- if not self.table.name in res[u'UnprocessedKeys']:
+ if self.table.name not in res[u'UnprocessedKeys']:
return
keys = res[u'UnprocessedKeys'][self.table.name][u'Keys']
@@ -68,7 +68,7 @@ class TableBatchGenerator(object):
res = batch.submit()
# parse the results
- if not self.table.name in res[u'Responses']:
+ if self.table.name not in res[u'Responses']:
continue
self.consumed_units += res[u'Responses'][self.table.name][u'ConsumedCapacityUnits']
for elem in res[u'Responses'][self.table.name][u'Items']:
diff --git a/awx/lib/site-packages/boto/dynamodb/types.py b/awx/lib/site-packages/boto/dynamodb/types.py
index 4c3270ba23..2049c219cc 100644
--- a/awx/lib/site-packages/boto/dynamodb/types.py
+++ b/awx/lib/site-packages/boto/dynamodb/types.py
@@ -27,7 +27,8 @@ Python types and vice-versa.
import base64
from decimal import (Decimal, DecimalException, Context,
Clamped, Overflow, Inexact, Underflow, Rounded)
-from exceptions import DynamoDBNumberError
+from boto.dynamodb.exceptions import DynamoDBNumberError
+from boto.compat import filter, map, six, long_type
DYNAMODB_CONTEXT = Context(
@@ -51,17 +52,25 @@ def float_to_decimal(f):
def is_num(n):
- types = (int, long, float, bool, Decimal)
+ types = (int, long_type, float, bool, Decimal)
return isinstance(n, types) or n in types
-def is_str(n):
- return isinstance(n, basestring) or (isinstance(n, type) and
- issubclass(n, basestring))
+if six.PY2:
+ def is_str(n):
+ return (isinstance(n, basestring) or
+ isinstance(n, type) and issubclass(n, basestring))
+ def is_binary(n):
+ return isinstance(n, Binary)
-def is_binary(n):
- return isinstance(n, Binary)
+else: # PY3
+ def is_str(n):
+ return (isinstance(n, str) or
+ isinstance(n, type) and issubclass(n, str))
+
+ def is_binary(n):
+ return isinstance(n, bytes) # Binary is subclass of bytes.
def serialize_num(val):
@@ -103,7 +112,7 @@ def get_dynamodb_type(val):
dynamodb_type = 'SS'
elif False not in map(is_binary, val):
dynamodb_type = 'BS'
- elif isinstance(val, Binary):
+ elif is_binary(val):
dynamodb_type = 'B'
if dynamodb_type is None:
msg = 'Unsupported type "%s" for value "%s"' % (type(val), val)
@@ -124,43 +133,62 @@ def dynamize_value(val):
elif dynamodb_type == 'S':
val = {dynamodb_type: val}
elif dynamodb_type == 'NS':
- val = {dynamodb_type: map(serialize_num, val)}
+ val = {dynamodb_type: list(map(serialize_num, val))}
elif dynamodb_type == 'SS':
val = {dynamodb_type: [n for n in val]}
elif dynamodb_type == 'B':
+ if isinstance(val, bytes):
+ val = Binary(val)
val = {dynamodb_type: val.encode()}
elif dynamodb_type == 'BS':
val = {dynamodb_type: [n.encode() for n in val]}
return val
-class Binary(object):
- def __init__(self, value):
- if not isinstance(value, basestring):
- raise TypeError('Value must be a string of binary data!')
+if six.PY2:
+ class Binary(object):
+ def __init__(self, value):
+ if not isinstance(value, (bytes, six.text_type)):
+ raise TypeError('Value must be a string of binary data!')
+ if not isinstance(value, bytes):
+ value = value.encode("utf-8")
- self.value = value
+ self.value = value
- def encode(self):
- return base64.b64encode(self.value)
+ def encode(self):
+ return base64.b64encode(self.value).decode('utf-8')
- def __eq__(self, other):
- if isinstance(other, Binary):
- return self.value == other.value
- else:
- return self.value == other
+ def __eq__(self, other):
+ if isinstance(other, Binary):
+ return self.value == other.value
+ else:
+ return self.value == other
- def __ne__(self, other):
- return not self.__eq__(other)
+ def __ne__(self, other):
+ return not self.__eq__(other)
- def __repr__(self):
- return 'Binary(%s)' % self.value
+ def __repr__(self):
+ return 'Binary(%r)' % self.value
- def __str__(self):
- return self.value
+ def __str__(self):
+ return self.value
- def __hash__(self):
- return hash(self.value)
+ def __hash__(self):
+ return hash(self.value)
+else:
+ class Binary(bytes):
+ def encode(self):
+ return base64.b64encode(self).decode('utf-8')
+
+ @property
+ def value(self):
+ # This matches the public API of the Python 2 version,
+ # but just returns itself since it is already a bytes
+ # instance.
+ return bytes(self)
+
+ def __repr__(self):
+ return 'Binary(%r)' % self.value
def item_object_hook(dct):
@@ -244,28 +272,30 @@ class Dynamizer(object):
n = str(float_to_decimal(attr))
else:
n = str(DYNAMODB_CONTEXT.create_decimal(attr))
- if filter(lambda x: x in n, ('Infinity', 'NaN')):
+ if list(filter(lambda x: x in n, ('Infinity', 'NaN'))):
raise TypeError('Infinity and NaN not supported')
return n
- except (TypeError, DecimalException), e:
+ except (TypeError, DecimalException) as e:
msg = '{0} numeric for `{1}`\n{2}'.format(
e.__class__.__name__, attr, str(e) or '')
raise DynamoDBNumberError(msg)
def _encode_s(self, attr):
- if isinstance(attr, unicode):
- attr = attr.encode('utf-8')
- elif not isinstance(attr, str):
+ if isinstance(attr, bytes):
+ attr = attr.decode('utf-8')
+ elif not isinstance(attr, six.text_type):
attr = str(attr)
return attr
def _encode_ns(self, attr):
- return map(self._encode_n, attr)
+ return list(map(self._encode_n, attr))
def _encode_ss(self, attr):
return [self._encode_s(n) for n in attr]
def _encode_b(self, attr):
+ if isinstance(attr, bytes):
+ attr = Binary(attr)
return attr.encode()
def _encode_bs(self, attr):
@@ -279,7 +309,7 @@ class Dynamizer(object):
"""
if len(attr) > 1 or not attr:
return attr
- dynamodb_type = attr.keys()[0]
+ dynamodb_type = list(attr.keys())[0]
if dynamodb_type.lower() == dynamodb_type:
# It's not an actual type, just a single character attr that
# overlaps with the DDB types. Return it.
diff --git a/awx/lib/site-packages/boto/dynamodb2/items.py b/awx/lib/site-packages/boto/dynamodb2/items.py
index 257a745917..463feee793 100644
--- a/awx/lib/site-packages/boto/dynamodb2/items.py
+++ b/awx/lib/site-packages/boto/dynamodb2/items.py
@@ -108,9 +108,11 @@ class Item(object):
def __contains__(self, key):
return key in self._data
- def __nonzero__(self):
+ def __bool__(self):
return bool(self._data)
+ __nonzero__ = __bool__
+
def _determine_alterations(self):
"""
Checks the ``-orig_data`` against the ``_data`` to determine what
@@ -256,7 +258,7 @@ class Item(object):
expects = {}
if fields is None:
- fields = self._data.keys() + self._orig_data.keys()
+ fields = list(self._data.keys()) + list(self._orig_data.keys())
# Only uniques.
fields = set(fields)
diff --git a/awx/lib/site-packages/boto/dynamodb2/layer1.py b/awx/lib/site-packages/boto/dynamodb2/layer1.py
index 44dadecb2b..aa319cfd4e 100644
--- a/awx/lib/site-packages/boto/dynamodb2/layer1.py
+++ b/awx/lib/site-packages/boto/dynamodb2/layer1.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved
+# Copyright (c) 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
@@ -37,7 +37,112 @@ class DynamoDBConnection(AWSQueryConnection):
"""
Amazon DynamoDB **Overview**
This is the Amazon DynamoDB API Reference. This guide provides
- descriptions and samples of the Amazon DynamoDB API.
+ descriptions and samples of the low-level DynamoDB API. For
+ information about DynamoDB application development, go to the
+ `Amazon DynamoDB Developer Guide`_.
+
+ Instead of making the requests to the low-level DynamoDB API
+ directly from your application, we recommend that you use the AWS
+ Software Development Kits (SDKs). The easy-to-use libraries in the
+ AWS SDKs make it unnecessary to call the low-level DynamoDB API
+ directly from your application. The libraries take care of request
+ authentication, serialization, and connection management. For more
+ information, go to `Using the AWS SDKs with DynamoDB`_ in the
+ Amazon DynamoDB Developer Guide .
+
+ If you decide to code against the low-level DynamoDB API directly,
+ you will need to write the necessary code to authenticate your
+ requests. For more information on signing your requests, go to
+ `Using the DynamoDB API`_ in the Amazon DynamoDB Developer Guide .
+
+ The following are short descriptions of each low-level API action,
+ organized by function.
+
+ **Managing Tables**
+
+
+
+ + CreateTable - Creates a table with user-specified provisioned
+ throughput settings. You must designate one attribute as the hash
+ primary key for the table; you can optionally designate a second
+ attribute as the range primary key. DynamoDB creates indexes on
+ these key attributes for fast data access. Optionally, you can
+ create one or more secondary indexes, which provide fast data
+ access using non-key attributes.
+ + DescribeTable - Returns metadata for a table, such as table
+ size, status, and index information.
+ + UpdateTable - Modifies the provisioned throughput settings for a
+ table. Optionally, you can modify the provisioned throughput
+ settings for global secondary indexes on the table.
+ + ListTables - Returns a list of all tables associated with the
+ current AWS account and endpoint.
+ + DeleteTable - Deletes a table and all of its indexes.
+
+
+
+ For conceptual information about managing tables, go to `Working
+ with Tables`_ in the Amazon DynamoDB Developer Guide .
+
+ **Reading Data**
+
+
+
+ + GetItem - Returns a set of attributes for the item that has a
+ given primary key. By default, GetItem performs an eventually
+ consistent read; however, applications can specify a strongly
+ consistent read instead.
+ + BatchGetItem - Performs multiple GetItem requests for data items
+ using their primary keys, from one table or multiple tables. The
+ response from BatchGetItem has a size limit of 1 MB and returns a
+ maximum of 100 items. Both eventually consistent and strongly
+ consistent reads can be used.
+ + Query - Returns one or more items from a table or a secondary
+ index. You must provide a specific hash key value. You can narrow
+ the scope of the query using comparison operators against a range
+ key value, or on the index key. Query supports either eventual or
+ strong consistency. A single response has a size limit of 1 MB.
+ + Scan - Reads every item in a table; the result set is eventually
+ consistent. You can limit the number of items returned by
+ filtering the data attributes, using conditional expressions. Scan
+ can be used to enable ad-hoc querying of a table against non-key
+ attributes; however, since this is a full table scan without using
+ an index, Scan should not be used for any application query use
+ case that requires predictable performance.
+
+
+
+ For conceptual information about reading data, go to `Working with
+ Items`_ and `Query and Scan Operations`_ in the Amazon DynamoDB
+ Developer Guide .
+
+ **Modifying Data**
+
+
+
+ + PutItem - Creates a new item, or replaces an existing item with
+ a new item (including all the attributes). By default, if an item
+ in the table already exists with the same primary key, the new
+ item completely replaces the existing item. You can use
+ conditional operators to replace an item only if its attribute
+ values match certain conditions, or to insert a new item only if
+ that item doesn't already exist.
+ + UpdateItem - Modifies the attributes of an existing item. You
+ can also use conditional operators to perform an update only if
+ the item's attribute values match certain conditions.
+ + DeleteItem - Deletes an item in a table by primary key. You can
+ use conditional operators to perform a delete an item only if the
+ item's attribute values match certain conditions.
+ + BatchWriteItem - Performs multiple PutItem and DeleteItem
+ requests across multiple tables in a single request. A failure of
+ any request(s) in the batch will not cause the entire
+ BatchWriteItem operation to fail. Supports batches of up to 25
+ items to put or delete, with a maximum total request size of 1 MB.
+
+
+
+ For conceptual information about modifying data, go to `Working
+ with Items`_ and `Query and Scan Operations`_ in the Amazon
+ DynamoDB Developer Guide .
"""
APIVersion = "2012-08-10"
DefaultRegionName = "us-east-1"
@@ -91,7 +196,7 @@ class DynamoDBConnection(AWSQueryConnection):
items by primary key.
A single operation can retrieve up to 1 MB of data, which can
- comprise as many as 100 items. BatchGetItem will return a
+ contain as many as 100 items. BatchGetItem will return a
partial result if the response size limit is exceeded, the
table's provisioned throughput is exceeded, or an internal
processing failure occurs. If a partial result is returned,
@@ -106,24 +211,38 @@ class DynamoDBConnection(AWSQueryConnection):
include its own logic to assemble the pages of results into
one dataset.
- If no items can be processed because of insufficient
- provisioned throughput on each of the tables involved in the
- request, BatchGetItem throws
- ProvisionedThroughputExceededException .
+ If none of the items can be processed due to insufficient
+ provisioned throughput on all of the tables in the request,
+ then BatchGetItem will throw a
+ ProvisionedThroughputExceededException . If at least one of
+ the items is successfully processed, then BatchGetItem
+ completes successfully, while returning the keys of the unread
+ items in UnprocessedKeys .
+
+ If DynamoDB returns any unprocessed items, you should retry
+ the batch operation on those items. However, we strongly
+ recommend that you use an exponential backoff algorithm . If
+ you retry the batch operation immediately, the underlying read
+ or write requests can still fail due to throttling on the
+ individual tables. If you delay the batch operation using
+ exponential backoff, the individual requests in the batch are
+ much more likely to succeed.
+
+ For more information, go to `Batch Operations and Error
+ Handling`_ in the Amazon DynamoDB Developer Guide.
By default, BatchGetItem performs eventually consistent reads
on every table in the request. If you want strongly consistent
reads instead, you can set ConsistentRead to `True` for any or
all tables.
- In order to minimize response latency, BatchGetItem fetches
+ In order to minimize response latency, BatchGetItem retrieves
items in parallel.
- When designing your application, keep in mind that Amazon
- DynamoDB does not return attributes in any particular order.
- To help parse the response by item, include the primary key
- values for the items in your request in the AttributesToGet
- parameter.
+ When designing your application, keep in mind that DynamoDB
+ does not return attributes in any particular order. To help
+ parse the response by item, include the primary key values for
+ the items in your request in the AttributesToGet parameter.
If a requested item does not exist, it is not returned in the
result. Requests for nonexistent items consume the minimum
@@ -141,17 +260,27 @@ class DynamoDBConnection(AWSQueryConnection):
+ Keys - An array of primary key attribute values that define specific
- items in the table.
+ items in the table. For each primary key, you must provide all of
+ the key attributes. For example, with a hash type primary key, you
+ only need to specify the hash attribute. For a hash-and-range type
+ primary key, you must specify both the hash attribute and the range
+ attribute.
+ AttributesToGet - One or more attributes to be retrieved from the
- table or index. By default, all attributes are returned. If a
- specified attribute is not found, it does not appear in the result.
+ table. By default, all attributes are returned. If a specified
+ attribute is not found, it does not appear in the result. Note that
+ AttributesToGet has no effect on provisioned throughput
+ consumption. DynamoDB determines capacity units consumed based on
+ item size, not on the amount of data that is returned to an
+ application.
+ ConsistentRead - If `True`, a strongly consistent read is used; if
`False` (the default), an eventually consistent read is used.
:type return_consumed_capacity: string
- :param return_consumed_capacity: If set to `TOTAL`, ConsumedCapacity is
- included in the response; if set to `NONE` (the default),
- ConsumedCapacity is not included.
+ :param return_consumed_capacity: If set to `TOTAL`, the response
+ includes ConsumedCapacity data for tables and indexes. If set to
+ `INDEXES`, the response includes ConsumedCapacity for indexes. If
+ set to `NONE` (the default), ConsumedCapacity is not included in
+ the response.
"""
params = {'RequestItems': request_items, }
@@ -183,27 +312,39 @@ class DynamoDBConnection(AWSQueryConnection):
unprocessed items and submit a new BatchWriteItem request with
those unprocessed items until all items have been processed.
- To write one item, you can use the PutItem operation; to
- delete one item, you can use the DeleteItem operation.
+ Note that if none of the items can be processed due to
+ insufficient provisioned throughput on all of the tables in
+ the request, then BatchGetItem will throw a
+ ProvisionedThroughputExceededException .
+
+ If DynamoDB returns any unprocessed items, you should retry
+ the batch operation on those items. However, we strongly
+ recommend that you use an exponential backoff algorithm . If
+ you retry the batch operation immediately, the underlying read
+ or write requests can still fail due to throttling on the
+ individual tables. If you delay the batch operation using
+ exponential backoff, the individual requests in the batch are
+ much more likely to succeed.
+
+ For more information, go to `Batch Operations and Error
+ Handling`_ in the Amazon DynamoDB Developer Guide.
With BatchWriteItem , you can efficiently write or delete
large amounts of data, such as from Amazon Elastic MapReduce
- (EMR), or copy data from another database into Amazon
- DynamoDB. In order to improve performance with these large-
- scale operations, BatchWriteItem does not behave in the same
- way as individual PutItem and DeleteItem calls would For
- example, you cannot specify conditions on individual put and
- delete requests, and BatchWriteItem does not return deleted
- items in the response.
+ (EMR), or copy data from another database into DynamoDB. In
+ order to improve performance with these large-scale
+ operations, BatchWriteItem does not behave in the same way as
+ individual PutItem and DeleteItem calls would. For example, you
+ cannot specify conditions on individual put and delete
+ requests, and BatchWriteItem does not return deleted items in
+ the response.
If you use a programming language that supports concurrency,
such as Java, you can use threads to write items in parallel.
Your application must include the necessary logic to manage
- the threads.
-
- With languages that don't support threading, such as PHP,
- BatchWriteItem will write or delete the specified items one at
- a time. In both situations, BatchWriteItem provides an
+ the threads. With languages that don't support threading, such
+ as PHP, you must update or delete the specified items one at a
+ time. In both situations, BatchWriteItem provides an
alternative where the API performs the specified put and
delete operations in parallel, giving you the power of the
thread pool approach without having to introduce complexity
@@ -215,8 +356,8 @@ class DynamoDBConnection(AWSQueryConnection):
operations on nonexistent items consume one write capacity
unit.
- If one or more of the following is true, Amazon DynamoDB
- rejects the entire batch write operation:
+ If one or more of the following is true, DynamoDB rejects the
+ entire batch write operation:
+ One or more tables specified in the BatchWriteItem request
@@ -241,8 +382,12 @@ class DynamoDBConnection(AWSQueryConnection):
The item to be deleted is identified by a Key subelement:
+ Key - A map of primary key attribute values that uniquely identify
- the item. Each entry in this map consists of an attribute name and
- an attribute value.
+ the item. Each entry in this map consists of an attribute name
+ and an attribute value. For each primary key, you must provide all
+ of the key attributes. For example, with a hash type primary key,
+ you only need to specify the hash attribute. For a hash-and-range
+ type primary key, you must specify both the hash attribute and the
+ range attribute.
+ PutRequest - Perform a PutItem operation on the specified item. The
item to be put is identified by an Item subelement:
@@ -257,15 +402,17 @@ class DynamoDBConnection(AWSQueryConnection):
match those of the schema in the table's attribute definition.
:type return_consumed_capacity: string
- :param return_consumed_capacity: If set to `TOTAL`, ConsumedCapacity is
- included in the response; if set to `NONE` (the default),
- ConsumedCapacity is not included.
+ :param return_consumed_capacity: If set to `TOTAL`, the response
+ includes ConsumedCapacity data for tables and indexes. If set to
+ `INDEXES`, the response includes ConsumedCapacity for indexes. If
+ set to `NONE` (the default), ConsumedCapacity is not included in
+ the response.
:type return_item_collection_metrics: string
:param return_item_collection_metrics: If set to `SIZE`, statistics
about item collections, if any, that were modified during the
operation are returned in the response. If set to `NONE` (the
- default), no statistics are returned..
+ default), no statistics are returned.
"""
params = {'RequestItems': request_items, }
@@ -286,16 +433,15 @@ class DynamoDBConnection(AWSQueryConnection):
the tables in different regions.
CreateTable is an asynchronous operation. Upon receiving a
- CreateTable request, Amazon DynamoDB immediately returns a
- response with a TableStatus of `CREATING`. After the table is
- created, Amazon DynamoDB sets the TableStatus to `ACTIVE`. You
- can perform read and write operations only on an `ACTIVE`
- table.
+ CreateTable request, DynamoDB immediately returns a response
+ with a TableStatus of `CREATING`. After the table is created,
+ DynamoDB sets the TableStatus to `ACTIVE`. You can perform
+ read and write operations only on an `ACTIVE` table.
- If you want to create multiple tables with local secondary
- indexes on them, you must create them sequentially. Only one
- table with local secondary indexes can be in the `CREATING`
- state at any given time.
+ If you want to create multiple tables with secondary indexes
+ on them, you must create them sequentially. Only one table
+ with secondary indexes can be in the `CREATING` state at any
+ given time.
You can use the DescribeTable API to check the table status.
@@ -308,9 +454,9 @@ class DynamoDBConnection(AWSQueryConnection):
:type key_schema: list
:param key_schema: Specifies the attributes that make up the primary
- key for the table. The attributes in KeySchema must also be defined
- in the AttributeDefinitions array. For more information, see `Data
- Model`_ in the Amazon DynamoDB Developer Guide.
+ key for a table or an index. The attributes in KeySchema must also
+ be defined in the AttributeDefinitions array. For more information,
+ see `Data Model`_ in the Amazon DynamoDB Developer Guide.
Each KeySchemaElement in the array is composed of:
@@ -331,18 +477,19 @@ class DynamoDBConnection(AWSQueryConnection):
:type local_secondary_indexes: list
:param local_secondary_indexes:
- One or more secondary indexes (the maximum is five) to be created on
- the table. Each index is scoped to a given hash key value. There is
- a 10 gigabyte size limit per hash key; otherwise, the size of a
- local secondary index is unconstrained.
+ One or more local secondary indexes (the maximum is five) to be created
+ on the table. Each index is scoped to a given hash key value. There
+ is a 10 GB size limit per hash key; otherwise, the size of a local
+ secondary index is unconstrained.
- Each secondary index in the array includes the following:
+ Each local secondary index in the array includes the following:
- + IndexName - The name of the secondary index. Must be unique only for
- this table.
- + KeySchema - Specifies the key schema for the index. The key schema
- must begin with the same hash key attribute as the table.
+ + IndexName - The name of the local secondary index. Must be unique
+ only for this table.
+ + KeySchema - Specifies the key schema for the local secondary index.
+ The key schema must begin with the same hash key attribute as the
+ table.
+ Projection - Specifies attributes that are copied (projected) from
the table into the index. These are in addition to the primary key
attributes and index key attributes, which are automatically
@@ -358,19 +505,51 @@ class DynamoDBConnection(AWSQueryConnection):
+ `ALL` - All of the table attributes are projected into the index.
+ NonKeyAttributes - A list of one or more non-key attribute names that
- are projected into the index. The total count of attributes
- specified in NonKeyAttributes , summed across all of the local
+ are projected into the secondary index. The total count of
+ attributes specified in NonKeyAttributes , summed across all of the
secondary indexes, must not exceed 20. If you project the same
attribute into two different indexes, this counts as two distinct
attributes when determining the total.
:type global_secondary_indexes: list
:param global_secondary_indexes:
+ One or more global secondary indexes (the maximum is five) to be
+ created on the table. Each global secondary index in the array
+ includes the following:
+
+
+ + IndexName - The name of the global secondary index. Must be unique
+ only for this table.
+ + KeySchema - Specifies the key schema for the global secondary index.
+ + Projection - Specifies attributes that are copied (projected) from
+ the table into the index. These are in addition to the primary key
+ attributes and index key attributes, which are automatically
+ projected. Each attribute specification is composed of:
+
+ + ProjectionType - One of the following:
+
+ + `KEYS_ONLY` - Only the index and primary keys are projected into the
+ index.
+ + `INCLUDE` - Only the specified table attributes are projected into
+ the index. The list of projected attributes are in NonKeyAttributes
+ .
+ + `ALL` - All of the table attributes are projected into the index.
+
+ + NonKeyAttributes - A list of one or more non-key attribute names that
+ are projected into the secondary index. The total count of
+ attributes specified in NonKeyAttributes , summed across all of the
+ secondary indexes, must not exceed 20. If you project the same
+ attribute into two different indexes, this counts as two distinct
+ attributes when determining the total.
+
+ + ProvisionedThroughput - The provisioned throughput settings for the
+ global secondary index, consisting of read and write capacity
+ units.
:type provisioned_throughput: dict
- :param provisioned_throughput: The provisioned throughput settings for
- the specified table. The settings can be modified using the
- UpdateTable operation.
+ :param provisioned_throughput: Represents the provisioned throughput
+ settings for a specified table or index. The settings can be
+ modified using the UpdateTable operation.
For current minimum and maximum provisioned throughput values, see
`Limits`_ in the Amazon DynamoDB Developer Guide.
@@ -388,7 +567,8 @@ class DynamoDBConnection(AWSQueryConnection):
return self.make_request(action='CreateTable',
body=json.dumps(params))
- def delete_item(self, table_name, key, expected=None, return_values=None,
+ def delete_item(self, table_name, key, expected=None,
+ conditional_operator=None, return_values=None,
return_consumed_capacity=None,
return_item_collection_metrics=None):
"""
@@ -406,8 +586,8 @@ class DynamoDBConnection(AWSQueryConnection):
Conditional deletes are useful for only deleting items if
specific conditions are met. If those conditions are met,
- Amazon DynamoDB performs the delete. Otherwise, the item is
- not deleted.
+ DynamoDB performs the delete. Otherwise, the item is not
+ deleted.
:type table_name: string
:param table_name: The name of the table from which to delete the item.
@@ -415,50 +595,181 @@ class DynamoDBConnection(AWSQueryConnection):
:type key: map
:param key: A map of attribute names to AttributeValue objects,
representing the primary key of the item to delete.
+ For the primary key, you must provide all of the attributes. For
+ example, with a hash type primary key, you only need to specify the
+ hash attribute. For a hash-and-range type primary key, you must
+ specify both the hash attribute and the range attribute.
:type expected: map
- :param expected: A map of attribute/condition pairs. This is the
- conditional block for the DeleteItem operation. All the conditions
- must be met for the operation to succeed.
- Expected allows you to provide an attribute name, and whether or not
- Amazon DynamoDB should check to see if the attribute value already
- exists; or if the attribute value exists and has a particular value
- before changing it.
+ :param expected:
+ A map of attribute/condition pairs. This is the conditional block for
+ the DeleteItem operation.
- Each item in Expected represents an attribute name for Amazon DynamoDB
- to check, along with the following:
+ Each element of Expected consists of an attribute name, a comparison
+ operator, and one or more values. DynamoDB compares the attribute
+ with the value(s) you supplied, using the comparison operator. For
+ each Expected element, the result of the evaluation is either true
+ or false.
+
+ If you specify more than one element in the Expected map, then by
+ default all of the conditions must evaluate to true. In other
+ words, the conditions are ANDed together. (You can use the
+ ConditionalOperator parameter to OR the conditions instead. If you
+ do this, then at least one of the conditions must evaluate to true,
+ rather than all of them.)
+
+ If the Expected map evaluates to true, then the conditional operation
+ succeeds; otherwise, it fails.
+
+ Each item in Expected represents an attribute name for DynamoDB to
+ check, along with an AttributeValueList and a ComparisonOperator :
- + Value - The attribute value for Amazon DynamoDB to check.
- + Exists - Causes Amazon DynamoDB to evaluate the value before
- attempting a conditional operation:
+ + AttributeValueList - One or more values to evaluate against the
+ supplied attribute. The number of values in the list depends on the
+ ComparisonOperator being used. For type Number, value comparisons
+ are numeric. String value comparisons for greater than, equals, or
+ less than are based on ASCII character code values. For example,
+ `a` is greater than `A`, and `aa` is greater than `B`. For a list
+ of code values, see
+ `http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters`_.
+ For Binary, DynamoDB treats each byte of the binary data as
+ unsigned when it compares binary values, for example when
+ evaluating query expressions.
+ + ComparisonOperator - A comparator for evaluating attributes in the
+ AttributeValueList . When performing the comparison, DynamoDB uses
+ strongly consistent reads. The following comparison operators are
+ available: `EQ | NE | LE | LT | GE | GT | NOT_NULL | NULL |
+ CONTAINS | NOT_CONTAINS | BEGINS_WITH | IN | BETWEEN` The following
+ are descriptions of each comparison operator.
- + If Exists is `True`, Amazon DynamoDB will check to see if that
- attribute value already exists in the table. If it is found, then
- the operation succeeds. If it is not found, the operation fails
- with a ConditionalCheckFailedException .
- + If Exists is `False`, Amazon DynamoDB assumes that the attribute
- value does not exist in the table. If in fact the value does not
- exist, then the assumption is valid and the operation succeeds. If
- the value is found, despite the assumption that it does not exist,
- the operation fails with a ConditionalCheckFailedException .
- The default setting for Exists is `True`. If you supply a Value all by
- itself, Amazon DynamoDB assumes the attribute exists: You don't
- have to set Exists to `True`, because it is implied. Amazon
- DynamoDB returns a ValidationException if:
-
- + Exists is `True` but there is no Value to check. (You expect a value
- to exist, but don't specify what that value is.)
- + Exists is `False` but you also specify a Value . (You cannot expect
- an attribute to have a value, while also expecting it not to
- exist.)
+ + `EQ` : Equal. AttributeValueList can contain only one AttributeValue
+ of type String, Number, Binary, String Set, Number Set, or Binary
+ Set. If an item contains an AttributeValue of a different type than
+ the one specified in the request, the value does not match. For
+ example, `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}`
+ does not equal `{"NS":["6", "2", "1"]}`. >
+ + `NE` : Not equal. AttributeValueList can contain only one
+ AttributeValue of type String, Number, Binary, String Set, Number
+ Set, or Binary Set. If an item contains an AttributeValue of a
+ different type than the one specified in the request, the value
+ does not match. For example, `{"S":"6"}` does not equal
+ `{"N":"6"}`. Also, `{"N":"6"}` does not equal `{"NS":["6", "2",
+ "1"]}`. >
+ + `LE` : Less than or equal. AttributeValueList can contain only one
+ AttributeValue of type String, Number, or Binary (not a set). If an
+ item contains an AttributeValue of a different type than the one
+ specified in the request, the value does not match. For example,
+ `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}` does not
+ compare to `{"NS":["6", "2", "1"]}`. >
+ + `LT` : Less than. AttributeValueList can contain only one
+ AttributeValue of type String, Number, or Binary (not a set). If an
+ item contains an AttributeValue of a different type than the one
+ specified in the request, the value does not match. For example,
+ `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}` does not
+ compare to `{"NS":["6", "2", "1"]}`. >
+ + `GE` : Greater than or equal. AttributeValueList can contain only one
+ AttributeValue of type String, Number, or Binary (not a set). If an
+ item contains an AttributeValue of a different type than the one
+ specified in the request, the value does not match. For example,
+ `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}` does not
+ compare to `{"NS":["6", "2", "1"]}`. >
+ + `GT` : Greater than. AttributeValueList can contain only one
+ AttributeValue of type String, Number, or Binary (not a set). If an
+ item contains an AttributeValue of a different type than the one
+ specified in the request, the value does not match. For example,
+ `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}` does not
+ compare to `{"NS":["6", "2", "1"]}`. >
+ + `NOT_NULL` : The attribute exists.
+ + `NULL` : The attribute does not exist.
+ + `CONTAINS` : checks for a subsequence, or value in a set.
+ AttributeValueList can contain only one AttributeValue of type
+ String, Number, or Binary (not a set). If the target attribute of
+ the comparison is a String, then the operation checks for a
+ substring match. If the target attribute of the comparison is
+ Binary, then the operation looks for a subsequence of the target
+ that matches the input. If the target attribute of the comparison
+ is a set ("SS", "NS", or "BS"), then the operation checks for a
+ member of the set (not as a substring).
+ + `NOT_CONTAINS` : checks for absence of a subsequence, or absence of a
+ value in a set. AttributeValueList can contain only one
+ AttributeValue of type String, Number, or Binary (not a set). If
+ the target attribute of the comparison is a String, then the
+ operation checks for the absence of a substring match. If the
+ target attribute of the comparison is Binary, then the operation
+ checks for the absence of a subsequence of the target that matches
+ the input. If the target attribute of the comparison is a set
+ ("SS", "NS", or "BS"), then the operation checks for the absence of
+ a member of the set (not as a substring).
+ + `BEGINS_WITH` : checks for a prefix. AttributeValueList can contain
+ only one AttributeValue of type String or Binary (not a Number or a
+ set). The target attribute of the comparison must be a String or
+ Binary (not a Number or a set). >
+ + `IN` : checks for exact matches. AttributeValueList can contain more
+ than one AttributeValue of type String, Number, or Binary (not a
+ set). The target attribute of the comparison must be of the same
+ type and exact value to match. A String never matches a String set.
+ + `BETWEEN` : Greater than or equal to the first value, and less than
+ or equal to the second value. AttributeValueList must contain two
+ AttributeValue elements of the same type, either String, Number, or
+ Binary (not a set). A target attribute matches if the target value
+ is greater than, or equal to, the first element and less than, or
+ equal to, the second element. If an item contains an AttributeValue
+ of a different type than the one specified in the request, the
+ value does not match. For example, `{"S":"6"}` does not compare to
+ `{"N":"6"}`. Also, `{"N":"6"}` does not compare to `{"NS":["6",
+ "2", "1"]}`
- If you specify more than one condition for Exists , then all of the
- conditions must evaluate to true. (In other words, the conditions
- are ANDed together.) Otherwise, the conditional operation will
- fail.
+ For usage examples of AttributeValueList and ComparisonOperator , see
+ `Conditional Expressions`_ in the Amazon DynamoDB Developer Guide.
+
+
+ For backward compatibility with previous DynamoDB releases, the
+ following parameters can be used instead of AttributeValueList and
+ ComparisonOperator :
+
+
+ + Value - A value for DynamoDB to compare with an attribute.
+ + Exists - Causes DynamoDB to evaluate the value before attempting the
+ conditional operation:
+
+ + If Exists is `True`, DynamoDB will check to see if that attribute
+ value already exists in the table. If it is found, then the
+ condition evaluates to true; otherwise the condition evaluates to
+ false.
+ + If Exists is `False`, DynamoDB assumes that the attribute value does
+ not exist in the table. If in fact the value does not exist, then
+ the assumption is valid and the condition evaluates to true. If the
+ value is found, despite the assumption that it does not exist, the
+ condition evaluates to false.
+
+
+
+ Even though DynamoDB continues to accept the Value and Exists
+ parameters, they are now deprecated. We recommend that you use
+ AttributeValueList and ComparisonOperator instead, since they allow
+ you to construct a much wider range of conditions.
+
+ The Value and Exists parameters are incompatible with
+ AttributeValueList and ComparisonOperator . If you attempt to use
+ both sets of parameters at once, DynamoDB will throw a
+ ValidationException .
+
+ :type conditional_operator: string
+ :param conditional_operator: A logical operator to apply to the
+ conditions in the Expected map:
+
+ + `AND` - If all of the conditions evaluate to true, then the entire
+ map evaluates to true.
+ `OR` - If at least one of the conditions evaluates to true, then the
+ entire map evaluates to true.
+
+
+ If you omit ConditionalOperator , then `AND` is the default.
+
+ The operation will succeed only if the entire map evaluates to true.
:type return_values: string
:param return_values:
@@ -472,20 +783,24 @@ class DynamoDBConnection(AWSQueryConnection):
+ `ALL_OLD` - The content of the old item is returned.
:type return_consumed_capacity: string
- :param return_consumed_capacity: If set to `TOTAL`, ConsumedCapacity is
- included in the response; if set to `NONE` (the default),
- ConsumedCapacity is not included.
+ :param return_consumed_capacity: If set to `TOTAL`, the response
+ includes ConsumedCapacity data for tables and indexes. If set to
+ `INDEXES`, the response includes ConsumedCapacity for indexes. If
+ set to `NONE` (the default), ConsumedCapacity is not included in
+ the response.
:type return_item_collection_metrics: string
:param return_item_collection_metrics: If set to `SIZE`, statistics
about item collections, if any, that were modified during the
operation are returned in the response. If set to `NONE` (the
- default), no statistics are returned..
+ default), no statistics are returned.
"""
params = {'TableName': table_name, 'Key': key, }
if expected is not None:
params['Expected'] = expected
+ if conditional_operator is not None:
+ params['ConditionalOperator'] = conditional_operator
if return_values is not None:
params['ReturnValues'] = return_values
if return_consumed_capacity is not None:
@@ -499,20 +814,20 @@ class DynamoDBConnection(AWSQueryConnection):
"""
The DeleteTable operation deletes a table and all of its
items. After a DeleteTable request, the specified table is in
- the `DELETING` state until Amazon DynamoDB completes the
- deletion. If the table is in the `ACTIVE` state, you can
- delete it. If a table is in `CREATING` or `UPDATING` states,
- then Amazon DynamoDB returns a ResourceInUseException . If the
- specified table does not exist, Amazon DynamoDB returns a
- ResourceNotFoundException . If table is already in the
- `DELETING` state, no error is returned.
+ the `DELETING` state until DynamoDB completes the deletion. If
+ the table is in the `ACTIVE` state, you can delete it. If a
+ table is in `CREATING` or `UPDATING` states, then DynamoDB
+ returns a ResourceInUseException . If the specified table does
+ not exist, DynamoDB returns a ResourceNotFoundException . If
+ table is already in the `DELETING` state, no error is
+ returned.
- Amazon DynamoDB might continue to accept data read and write
+ DynamoDB might continue to accept data read and write
operations, such as GetItem and PutItem , on a table in the
`DELETING` state until the table deletion is complete.
- When you delete a table, any local secondary indexes on that
- table are also deleted.
+ When you delete a table, any indexes on that table are also
+ deleted.
Use the DescribeTable API to check the status of the table.
@@ -557,12 +872,20 @@ class DynamoDBConnection(AWSQueryConnection):
:type key: map
:param key: A map of attribute names to AttributeValue objects,
representing the primary key of the item to retrieve.
+ For the primary key, you must provide all of the attributes. For
+ example, with a hash type primary key, you only need to specify the
+ hash attribute. For a hash-and-range type primary key, you must
+ specify both the hash attribute and the range attribute.
:type attributes_to_get: list
:param attributes_to_get: The names of one or more attributes to
retrieve. If no attribute names are specified, then all attributes
will be returned. If any of the requested attributes are not found,
they will not appear in the result.
+ Note that AttributesToGet has no effect on provisioned throughput
+ consumption. DynamoDB determines capacity units consumed based on
+ item size, not on the amount of data that is returned to an
+ application.
:type consistent_read: boolean
:param consistent_read: If set to `True`, then the operation uses
@@ -570,9 +893,11 @@ class DynamoDBConnection(AWSQueryConnection):
are used.
:type return_consumed_capacity: string
- :param return_consumed_capacity: If set to `TOTAL`, ConsumedCapacity is
- included in the response; if set to `NONE` (the default),
- ConsumedCapacity is not included.
+ :param return_consumed_capacity: If set to `TOTAL`, the response
+ includes ConsumedCapacity data for tables and indexes. If set to
+ `INDEXES`, the response includes ConsumedCapacity for indexes. If
+ set to `NONE` (the default), ConsumedCapacity is not included in
+ the response.
"""
params = {'TableName': table_name, 'Key': key, }
@@ -587,17 +912,19 @@ class DynamoDBConnection(AWSQueryConnection):
def list_tables(self, exclusive_start_table_name=None, limit=None):
"""
- Returns an array of all the tables associated with the current
- account and endpoint.
+ Returns an array of table names associated with the current
+ account and endpoint. The output from ListTables is paginated,
+ with each page returning a maximum of 100 table names.
:type exclusive_start_table_name: string
- :param exclusive_start_table_name: The name of the table that starts
- the list. If you already ran a ListTables operation and received a
- LastEvaluatedTableName value in the response, use that value here
- to continue the list.
+ :param exclusive_start_table_name: The first table name that this
+ operation will evaluate. Use the value that was returned for
+ LastEvaluatedTableName in a previous operation, so that you can
+ obtain the next page of results.
:type limit: integer
- :param limit: A maximum number of table names to return.
+ :param limit: A maximum number of table names to return. If this
+ parameter is not specified, the limit is 100.
"""
params = {}
@@ -610,7 +937,8 @@ class DynamoDBConnection(AWSQueryConnection):
def put_item(self, table_name, item, expected=None, return_values=None,
return_consumed_capacity=None,
- return_item_collection_metrics=None):
+ return_item_collection_metrics=None,
+ conditional_operator=None):
"""
Creates a new item, or replaces an old item with a new item.
If an item already exists in the specified table with the same
@@ -635,8 +963,8 @@ class DynamoDBConnection(AWSQueryConnection):
description.
To prevent a new item from replacing an existing item, use a
- conditional put operation with Exists set to `False` for the
- primary key attribute, or attributes.
+ conditional put operation with ComparisonOperator set to
+ `NULL` for the primary key attribute, or attributes.
For more information about using this API, see `Working with
Items`_ in the Amazon DynamoDB Developer Guide.
@@ -648,6 +976,11 @@ class DynamoDBConnection(AWSQueryConnection):
:param item: A map of attribute name/value pairs, one for each
attribute. Only the primary key attributes are required; you can
optionally provide other attribute name-value pairs for the item.
+ You must provide all of the attributes for the primary key. For
+ example, with a hash type primary key, you only need to specify the
+ hash attribute. For a hash-and-range type primary key, you must
+ specify both the hash attribute and the range attribute.
+
If you specify any attributes that are part of an index key, then the
data types for those attributes must match those of the schema in
the table's attribute definition.
@@ -658,48 +991,161 @@ class DynamoDBConnection(AWSQueryConnection):
Each element in the Item map is an AttributeValue object.
:type expected: map
- :param expected: A map of attribute/condition pairs. This is the
- conditional block for the PutItem operation. All the conditions
- must be met for the operation to succeed.
- Expected allows you to provide an attribute name, and whether or not
- Amazon DynamoDB should check to see if the attribute value already
- exists; or if the attribute value exists and has a particular value
- before changing it.
+ :param expected:
+ A map of attribute/condition pairs. This is the conditional block for
+ the PutItem operation.
- Each item in Expected represents an attribute name for Amazon DynamoDB
- to check, along with the following:
+ Each element of Expected consists of an attribute name, a comparison
+ operator, and one or more values. DynamoDB compares the attribute
+ with the value(s) you supplied, using the comparison operator. For
+ each Expected element, the result of the evaluation is either true
+ or false.
+
+ If you specify more than one element in the Expected map, then by
+ default all of the conditions must evaluate to true. In other
+ words, the conditions are ANDed together. (You can use the
+ ConditionalOperator parameter to OR the conditions instead. If you
+ do this, then at least one of the conditions must evaluate to true,
+ rather than all of them.)
+
+ If the Expected map evaluates to true, then the conditional operation
+ succeeds; otherwise, it fails.
+
+ Each item in Expected represents an attribute name for DynamoDB to
+ check, along with an AttributeValueList and a ComparisonOperator :
- + Value - The attribute value for Amazon DynamoDB to check.
- + Exists - Causes Amazon DynamoDB to evaluate the value before
- attempting a conditional operation:
+ + AttributeValueList - One or more values to evaluate against the
+ supplied attribute. The number of values in the list depends on the
+ ComparisonOperator being used. For type Number, value comparisons
+ are numeric. String value comparisons for greater than, equals, or
+ less than are based on ASCII character code values. For example,
+ `a` is greater than `A`, and `aa` is greater than `B`. For a list
+ of code values, see
+ `http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters`_.
+ For Binary, DynamoDB treats each byte of the binary data as
+ unsigned when it compares binary values, for example when
+ evaluating query expressions.
+ + ComparisonOperator - A comparator for evaluating attributes in the
+ AttributeValueList . When performing the comparison, DynamoDB uses
+ strongly consistent reads. The following comparison operators are
+ available: `EQ | NE | LE | LT | GE | GT | NOT_NULL | NULL |
+ CONTAINS | NOT_CONTAINS | BEGINS_WITH | IN | BETWEEN` The following
+ are descriptions of each comparison operator.
- + If Exists is `True`, Amazon DynamoDB will check to see if that
- attribute value already exists in the table. If it is found, then
- the operation succeeds. If it is not found, the operation fails
- with a ConditionalCheckFailedException .
- + If Exists is `False`, Amazon DynamoDB assumes that the attribute
- value does not exist in the table. If in fact the value does not
- exist, then the assumption is valid and the operation succeeds. If
- the value is found, despite the assumption that it does not exist,
- the operation fails with a ConditionalCheckFailedException .
- The default setting for Exists is `True`. If you supply a Value all by
- itself, Amazon DynamoDB assumes the attribute exists: You don't
- have to set Exists to `True`, because it is implied. Amazon
- DynamoDB returns a ValidationException if:
-
- + Exists is `True` but there is no Value to check. (You expect a value
- to exist, but don't specify what that value is.)
- + Exists is `False` but you also specify a Value . (You cannot expect
- an attribute to have a value, while also expecting it not to
- exist.)
+ + `EQ` : Equal. AttributeValueList can contain only one AttributeValue
+ of type String, Number, Binary, String Set, Number Set, or Binary
+ Set. If an item contains an AttributeValue of a different type than
+ the one specified in the request, the value does not match. For
+ example, `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}`
+ does not equal `{"NS":["6", "2", "1"]}`. >
+ + `NE` : Not equal. AttributeValueList can contain only one
+ AttributeValue of type String, Number, Binary, String Set, Number
+ Set, or Binary Set. If an item contains an AttributeValue of a
+ different type than the one specified in the request, the value
+ does not match. For example, `{"S":"6"}` does not equal
+ `{"N":"6"}`. Also, `{"N":"6"}` does not equal `{"NS":["6", "2",
+ "1"]}`. >
+ + `LE` : Less than or equal. AttributeValueList can contain only one
+ AttributeValue of type String, Number, or Binary (not a set). If an
+ item contains an AttributeValue of a different type than the one
+ specified in the request, the value does not match. For example,
+ `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}` does not
+ compare to `{"NS":["6", "2", "1"]}`. >
+ + `LT` : Less than. AttributeValueList can contain only one
+ AttributeValue of type String, Number, or Binary (not a set). If an
+ item contains an AttributeValue of a different type than the one
+ specified in the request, the value does not match. For example,
+ `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}` does not
+ compare to `{"NS":["6", "2", "1"]}`. >
+ + `GE` : Greater than or equal. AttributeValueList can contain only one
+ AttributeValue of type String, Number, or Binary (not a set). If an
+ item contains an AttributeValue of a different type than the one
+ specified in the request, the value does not match. For example,
+ `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}` does not
+ compare to `{"NS":["6", "2", "1"]}`. >
+ + `GT` : Greater than. AttributeValueList can contain only one
+ AttributeValue of type String, Number, or Binary (not a set). If an
+ item contains an AttributeValue of a different type than the one
+ specified in the request, the value does not match. For example,
+ `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}` does not
+ compare to `{"NS":["6", "2", "1"]}`. >
+ + `NOT_NULL` : The attribute exists.
+ + `NULL` : The attribute does not exist.
+ + `CONTAINS` : checks for a subsequence, or value in a set.
+ AttributeValueList can contain only one AttributeValue of type
+ String, Number, or Binary (not a set). If the target attribute of
+ the comparison is a String, then the operation checks for a
+ substring match. If the target attribute of the comparison is
+ Binary, then the operation looks for a subsequence of the target
+ that matches the input. If the target attribute of the comparison
+ is a set ("SS", "NS", or "BS"), then the operation checks for a
+ member of the set (not as a substring).
+ + `NOT_CONTAINS` : checks for absence of a subsequence, or absence of a
+ value in a set. AttributeValueList can contain only one
+ AttributeValue of type String, Number, or Binary (not a set). If
+ the target attribute of the comparison is a String, then the
+ operation checks for the absence of a substring match. If the
+ target attribute of the comparison is Binary, then the operation
+ checks for the absence of a subsequence of the target that matches
+ the input. If the target attribute of the comparison is a set
+ ("SS", "NS", or "BS"), then the operation checks for the absence of
+ a member of the set (not as a substring).
+ + `BEGINS_WITH` : checks for a prefix. AttributeValueList can contain
+ only one AttributeValue of type String or Binary (not a Number or a
+ set). The target attribute of the comparison must be a String or
+ Binary (not a Number or a set). >
+ + `IN` : checks for exact matches. AttributeValueList can contain more
+ than one AttributeValue of type String, Number, or Binary (not a
+ set). The target attribute of the comparison must be of the same
+ type and exact value to match. A String never matches a String set.
+ + `BETWEEN` : Greater than or equal to the first value, and less than
+ or equal to the second value. AttributeValueList must contain two
+ AttributeValue elements of the same type, either String, Number, or
+ Binary (not a set). A target attribute matches if the target value
+ is greater than, or equal to, the first element and less than, or
+ equal to, the second element. If an item contains an AttributeValue
+ of a different type than the one specified in the request, the
+ value does not match. For example, `{"S":"6"}` does not compare to
+ `{"N":"6"}`. Also, `{"N":"6"}` does not compare to `{"NS":["6",
+ "2", "1"]}`
- If you specify more than one condition for Exists , then all of the
- conditions must evaluate to true. (In other words, the conditions
- are ANDed together.) Otherwise, the conditional operation will
- fail.
+ For usage examples of AttributeValueList and ComparisonOperator , see
+ `Conditional Expressions`_ in the Amazon DynamoDB Developer Guide.
+
+
+ For backward compatibility with previous DynamoDB releases, the
+ following parameters can be used instead of AttributeValueList and
+ ComparisonOperator :
+
+
+ + Value - A value for DynamoDB to compare with an attribute.
+ + Exists - Causes DynamoDB to evaluate the value before attempting the
+ conditional operation:
+
+ + If Exists is `True`, DynamoDB will check to see if that attribute
+ value already exists in the table. If it is found, then the
+ condition evaluates to true; otherwise the condition evaluates to
+ false.
+ + If Exists is `False`, DynamoDB assumes that the attribute value does
+ not exist in the table. If in fact the value does not exist, then
+ the assumption is valid and the condition evaluates to true. If the
+ value is found, despite the assumption that it does not exist, the
+ condition evaluates to false.
+
+
+
+ Even though DynamoDB continues to accept the Value and Exists
+ parameters, they are now deprecated. We recommend that you use
+ AttributeValueList and ComparisonOperator instead, since they allow
+ you to construct a much wider range of conditions.
+
+ The Value and Exists parameters are incompatible with
+ AttributeValueList and ComparisonOperator . If you attempt to use
+ both sets of parameters at once, DynamoDB will throw a
+ ValidationException .
:type return_values: string
:param return_values:
@@ -714,15 +1160,31 @@ class DynamoDBConnection(AWSQueryConnection):
the content of the old item is returned.
:type return_consumed_capacity: string
- :param return_consumed_capacity: If set to `TOTAL`, ConsumedCapacity is
- included in the response; if set to `NONE` (the default),
- ConsumedCapacity is not included.
+ :param return_consumed_capacity: If set to `TOTAL`, the response
+ includes ConsumedCapacity data for tables and indexes. If set to
+ `INDEXES`, the response includes ConsumedCapacity for indexes. If
+ set to `NONE` (the default), ConsumedCapacity is not included in
+ the response.
:type return_item_collection_metrics: string
:param return_item_collection_metrics: If set to `SIZE`, statistics
about item collections, if any, that were modified during the
operation are returned in the response. If set to `NONE` (the
- default), no statistics are returned..
+ default), no statistics are returned.
+
+ :type conditional_operator: string
+ :param conditional_operator: A logical operator to apply to the
+ conditions in the Expected map:
+
+ + `AND` - If all of the conditions evaluate to true, then the entire
+ map evaluates to true.
+      + `OR` - If at least one of the conditions evaluates to true, then the
+ entire map evaluates to true.
+
+
+ If you omit ConditionalOperator , then `AND` is the default.
+
+ The operation will succeed only if the entire map evaluates to true.
"""
params = {'TableName': table_name, 'Item': item, }
@@ -734,13 +1196,16 @@ class DynamoDBConnection(AWSQueryConnection):
params['ReturnConsumedCapacity'] = return_consumed_capacity
if return_item_collection_metrics is not None:
params['ReturnItemCollectionMetrics'] = return_item_collection_metrics
+ if conditional_operator is not None:
+ params['ConditionalOperator'] = conditional_operator
return self.make_request(action='PutItem',
body=json.dumps(params))
- def query(self, table_name, index_name=None, select=None,
+ def query(self, table_name, key_conditions, index_name=None, select=None,
attributes_to_get=None, limit=None, consistent_read=None,
- key_conditions=None, scan_index_forward=None,
- exclusive_start_key=None, return_consumed_capacity=None):
+ query_filter=None, conditional_operator=None,
+ scan_index_forward=None, exclusive_start_key=None,
+ return_consumed_capacity=None):
"""
A Query operation directly accesses items from a table using
the table primary key, or from an index using the index key.
@@ -761,15 +1226,20 @@ class DynamoDBConnection(AWSQueryConnection):
and a LastEvaluatedKey . The LastEvaluatedKey is only provided
if the results exceed 1 MB, or if you have used Limit .
- To request a strongly consistent result, set ConsistentRead to
- true.
+ You can query a table, a local secondary index, or a global
+ secondary index. For a query on a table or on a local
+ secondary index, you can set ConsistentRead to true and obtain
+ a strongly consistent result. Global secondary indexes support
+ eventually consistent reads only, so do not specify
+ ConsistentRead when querying a global secondary index.
:type table_name: string
:param table_name: The name of the table containing the requested
items.
:type index_name: string
- :param index_name: The name of an index on the table to query.
+ :param index_name: The name of an index to query. This can be any local
+ secondary index or global secondary index on the table.
:type select: string
:param select: The attributes to be returned in the result. You can
@@ -777,31 +1247,35 @@ class DynamoDBConnection(AWSQueryConnection):
of matching items, or in the case of an index, some or all of the
attributes projected into the index.
- + `ALL_ATTRIBUTES`: Returns all of the item attributes. For a table,
- this is the default. For an index, this mode causes Amazon DynamoDB
- to fetch the full item from the table for each matching item in the
- index. If the index is configured to project all item attributes,
- the matching items will not be fetched from the table. Fetching
- items from the table incurs additional throughput cost and latency.
+ + `ALL_ATTRIBUTES`: Returns all of the item attributes from the
+ specified table or index. If you are querying a local secondary
+ index, then for each matching item in the index DynamoDB will fetch
+ the entire item from the parent table. If the index is configured
+ to project all item attributes, then all of the data can be
+ obtained from the local secondary index, and no fetching is
+        required.
+ `ALL_PROJECTED_ATTRIBUTES`: Allowed only when querying an index.
Retrieves all attributes which have been projected into the index.
If the index is configured to project all attributes, this is
- equivalent to specifying ALL_ATTRIBUTES .
+ equivalent to specifying `ALL_ATTRIBUTES`.
+ `COUNT`: Returns the number of matching items, rather than the
matching items themselves.
+ `SPECIFIC_ATTRIBUTES` : Returns only the attributes listed in
AttributesToGet . This is equivalent to specifying AttributesToGet
- without specifying any value for Select . If you are querying an
- index and request only attributes that are projected into that
- index, the operation will read only the index and not the table. If
- any of the requested attributes are not projected into the index,
- Amazon DynamoDB will need to fetch each matching item from the
- table. This extra fetching incurs additional throughput cost and
- latency.
+ without specifying any value for Select . If you are querying a
+ local secondary index and request only attributes that are
+ projected into that index, the operation will read only the index
+ and not the table. If any of the requested attributes are not
+ projected into the local secondary index, DynamoDB will fetch each
+ of these attributes from the parent table. This extra fetching
+ incurs additional throughput cost and latency. If you are querying
+ a global secondary index, you can only request attributes that are
+ projected into the index. Global secondary index queries cannot
+ fetch attributes from the parent table.
- When neither Select nor AttributesToGet are specified, Amazon DynamoDB
- defaults to `ALL_ATTRIBUTES` when accessing a table, and
+ If neither Select nor AttributesToGet are specified, DynamoDB defaults
+ to `ALL_ATTRIBUTES` when accessing a table, and
`ALL_PROJECTED_ATTRIBUTES` when accessing an index. You cannot use
both Select and AttributesToGet together in a single request,
unless the value for Select is `SPECIFIC_ATTRIBUTES`. (This usage
@@ -813,75 +1287,87 @@ class DynamoDBConnection(AWSQueryConnection):
retrieve. If no attribute names are specified, then all attributes
will be returned. If any of the requested attributes are not found,
they will not appear in the result.
- If you are querying an index and request only attributes that are
- projected into that index, the operation will read only the index
- and not the table. If any of the requested attributes are not
- projected into the index, Amazon DynamoDB will need to fetch each
- matching item from the table. This extra fetching incurs additional
- throughput cost and latency.
+ Note that AttributesToGet has no effect on provisioned throughput
+ consumption. DynamoDB determines capacity units consumed based on
+ item size, not on the amount of data that is returned to an
+ application.
You cannot use both AttributesToGet and Select together in a Query
request, unless the value for Select is `SPECIFIC_ATTRIBUTES`.
(This usage is equivalent to specifying AttributesToGet without any
value for Select .)
+ If you are querying a local secondary index and request only attributes
+ that are projected into that index, the operation will read only
+ the index and not the table. If any of the requested attributes are
+ not projected into the local secondary index, DynamoDB will fetch
+ each of these attributes from the parent table. This extra fetching
+ incurs additional throughput cost and latency.
+
+ If you are querying a global secondary index, you can only request
+ attributes that are projected into the index. Global secondary
+ index queries cannot fetch attributes from the parent table.
+
:type limit: integer
:param limit: The maximum number of items to evaluate (not necessarily
- the number of matching items). If Amazon DynamoDB processes the
- number of items up to the limit while processing the results, it
- stops the operation and returns the matching values up to that
- point, and a LastEvaluatedKey to apply in a subsequent operation,
- so that you can pick up where you left off. Also, if the processed
- data set size exceeds 1 MB before Amazon DynamoDB reaches this
- limit, it stops the operation and returns the matching values up to
- the limit, and a LastEvaluatedKey to apply in a subsequent
- operation to continue the operation. For more information see
- `Query and Scan`_ in the Amazon DynamoDB Developer Guide.
+ the number of matching items). If DynamoDB processes the number of
+ items up to the limit while processing the results, it stops the
+ operation and returns the matching values up to that point, and a
+ LastEvaluatedKey to apply in a subsequent operation, so that you
+ can pick up where you left off. Also, if the processed data set
+ size exceeds 1 MB before DynamoDB reaches this limit, it stops the
+ operation and returns the matching values up to the limit, and a
+ LastEvaluatedKey to apply in a subsequent operation to continue the
+ operation. For more information, see `Query and Scan`_ in the
+ Amazon DynamoDB Developer Guide.
:type consistent_read: boolean
:param consistent_read: If set to `True`, then the operation uses
strongly consistent reads; otherwise, eventually consistent reads
are used.
+ Strongly consistent reads are not supported on global secondary
+ indexes. If you query a global secondary index with ConsistentRead
+ set to `True`, you will receive an error message.
:type key_conditions: map
- :param key_conditions:
- The selection criteria for the query.
-
+ :param key_conditions: The selection criteria for the query.
For a query on a table, you can only have conditions on the table
primary key attributes. You must specify the hash key attribute
name and value as an `EQ` condition. You can optionally specify a
second condition, referring to the range key attribute.
- For a query on a secondary index, you can only have conditions on the
- index key attributes. You must specify the index hash attribute
- name and value as an EQ condition. You can optionally specify a
- second condition, referring to the index key range attribute.
+ For a query on an index, you can only have conditions on the index key
+ attributes. You must specify the index hash attribute name and
+ value as an EQ condition. You can optionally specify a second
+ condition, referring to the index key range attribute.
- Multiple conditions are evaluated using "AND"; in other words, all of
- the conditions must be met in order for an item to appear in the
- results results.
+ If you specify more than one condition in the KeyConditions map, then
+ by default all of the conditions must evaluate to true. In other
+ words, the conditions are ANDed together. (You can use the
+ ConditionalOperator parameter to OR the conditions instead. If you
+ do this, then at least one of the conditions must evaluate to true,
+ rather than all of them.)
Each KeyConditions element consists of an attribute name to compare,
along with the following:
+ AttributeValueList - One or more values to evaluate against the
- supplied attribute. This list contains exactly one value, except
- for a `BETWEEN` or `IN` comparison, in which case the list contains
- two values. For type Number, value comparisons are numeric. String
- value comparisons for greater than, equals, or less than are based
- on ASCII character code values. For example, `a` is greater than
- `A`, and `aa` is greater than `B`. For a list of code values, see
+ supplied attribute. The number of values in the list depends on the
+ ComparisonOperator being used. For type Number, value comparisons
+ are numeric. String value comparisons for greater than, equals, or
+ less than are based on ASCII character code values. For example,
+ `a` is greater than `A`, and `aa` is greater than `B`. For a list
+ of code values, see
`http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters`_.
- For Binary, Amazon DynamoDB treats each byte of the binary data as
+ For Binary, DynamoDB treats each byte of the binary data as
unsigned when it compares binary values, for example when
evaluating query expressions.
+ ComparisonOperator - A comparator for evaluating attributes. For
- example, equals, greater than, less than, etc. Valid comparison
- operators for Query: `EQ | LE | LT | GE | GT | BEGINS_WITH |
- BETWEEN` For information on specifying data types in JSON, see
- `JSON Data Format`_ in the Amazon DynamoDB Developer Guide. The
- following are descriptions of each comparison operator.
+ example, equals, greater than, less than, etc. For KeyConditions ,
+ only the following comparison operators are supported: `EQ | LE |
+ LT | GE | GT | BEGINS_WITH | BETWEEN` The following are
+ descriptions of these comparison operators.
+ `EQ` : Equal. AttributeValueList can contain only one AttributeValue
of type String, Number, or Binary (not a set). If an item contains
@@ -890,33 +1376,33 @@ class DynamoDBConnection(AWSQueryConnection):
not equal `{"N":"6"}`. Also, `{"N":"6"}` does not equal
`{"NS":["6", "2", "1"]}`.
+ `LE` : Less than or equal. AttributeValueList can contain only one
- AttributeValue of type String, Number, or Binary (not a set). If an
- item contains an AttributeValue of a different type than the one
- specified in the request, the value does not match. For example,
- `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}` does not
- compare to `{"NS":["6", "2", "1"]}`.
+ AttributeValue of type String, Number, or Binary (not a set). If an
+ item contains an AttributeValue of a different type than the one
+ specified in the request, the value does not match. For example,
+ `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}` does not
+        compare to `{"NS":["6", "2", "1"]}`.
+ `LT` : Less than. AttributeValueList can contain only one
- AttributeValue of type String, Number, or Binary (not a set). If an
- item contains an AttributeValue of a different type than the one
- specified in the request, the value does not match. For example,
- `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}` does not
- compare to `{"NS":["6", "2", "1"]}`.
+ AttributeValue of type String, Number, or Binary (not a set). If an
+ item contains an AttributeValue of a different type than the one
+ specified in the request, the value does not match. For example,
+ `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}` does not
+        compare to `{"NS":["6", "2", "1"]}`.
+ `GE` : Greater than or equal. AttributeValueList can contain only one
- AttributeValue of type String, Number, or Binary (not a set). If an
- item contains an AttributeValue of a different type than the one
- specified in the request, the value does not match. For example,
- `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}` does not
- compare to `{"NS":["6", "2", "1"]}`.
+ AttributeValue of type String, Number, or Binary (not a set). If an
+ item contains an AttributeValue of a different type than the one
+ specified in the request, the value does not match. For example,
+ `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}` does not
+        compare to `{"NS":["6", "2", "1"]}`.
+ `GT` : Greater than. AttributeValueList can contain only one
- AttributeValue of type String, Number, or Binary (not a set). If an
- item contains an AttributeValue of a different type than the one
- specified in the request, the value does not match. For example,
- `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}` does not
- compare to `{"NS":["6", "2", "1"]}`.
+ AttributeValue of type String, Number, or Binary (not a set). If an
+ item contains an AttributeValue of a different type than the one
+ specified in the request, the value does not match. For example,
+ `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}` does not
+        compare to `{"NS":["6", "2", "1"]}`.
+ `BEGINS_WITH` : checks for a prefix. AttributeValueList can contain
- only one AttributeValue of type String or Binary (not a Number or a
- set). The target attribute of the comparison must be a String or
- Binary (not a Number or a set).
+ only one AttributeValue of type String or Binary (not a Number or a
+ set). The target attribute of the comparison must be a String or
+        Binary (not a Number or a set).
+ `BETWEEN` : Greater than or equal to the first value, and less than
or equal to the second value. AttributeValueList must contain two
AttributeValue elements of the same type, either String, Number, or
@@ -928,14 +1414,68 @@ class DynamoDBConnection(AWSQueryConnection):
`{"N":"6"}`. Also, `{"N":"6"}` does not compare to `{"NS":["6",
"2", "1"]}`
+
+
+ For usage examples of AttributeValueList and ComparisonOperator , see
+ `Conditional Expressions`_ in the Amazon DynamoDB Developer Guide.
+
+ :type query_filter: map
+ :param query_filter:
+ Evaluates the query results and returns only the desired values.
+
+ If you specify more than one condition in the QueryFilter map, then by
+ default all of the conditions must evaluate to true. In other
+ words, the conditions are ANDed together. (You can use the
+ ConditionalOperator parameter to OR the conditions instead. If you
+ do this, then at least one of the conditions must evaluate to true,
+ rather than all of them.)
+
+ Each QueryFilter element consists of an attribute name to compare,
+ along with the following:
+
+
+ + AttributeValueList - One or more values to evaluate against the
+ supplied attribute. The number of values in the list depends on the
+ ComparisonOperator being used. For type Number, value comparisons
+ are numeric. String value comparisons for greater than, equals, or
+ less than are based on ASCII character code values. For example,
+ `a` is greater than `A`, and `aa` is greater than `B`. For a list
+ of code values, see
+ `http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters`_.
+ For Binary, DynamoDB treats each byte of the binary data as
+ unsigned when it compares binary values, for example when
+ evaluating query expressions. For information on specifying data
+ types in JSON, see `JSON Data Format`_ in the Amazon DynamoDB
+ Developer Guide.
+ + ComparisonOperator - A comparator for evaluating attributes. For
+ example, equals, greater than, less than, etc. The following
+ comparison operators are available: `EQ | NE | LE | LT | GE | GT |
+ NOT_NULL | NULL | CONTAINS | NOT_CONTAINS | BEGINS_WITH | IN |
+ BETWEEN` For complete descriptions of all comparison operators, see
+ `API_Condition.html`_.
+
+ :type conditional_operator: string
+ :param conditional_operator: A logical operator to apply to the
+ conditions in the QueryFilter map:
+
+ + `AND` - If all of the conditions evaluate to true, then the entire
+ map evaluates to true.
+      + `OR` - If at least one of the conditions evaluates to true, then the
+ entire map evaluates to true.
+
+
+ If you omit ConditionalOperator , then `AND` is the default.
+
+ The operation will succeed only if the entire map evaluates to true.
+
:type scan_index_forward: boolean
:param scan_index_forward: Specifies ascending (true) or descending
- (false) traversal of the index. Amazon DynamoDB returns results
- reflecting the requested order determined by the range key. If the
- data type is Number, the results are returned in numeric order. For
- String, the results are returned in order of ASCII character code
- values. For Binary, Amazon DynamoDB treats each byte of the binary
- data as unsigned when it compares binary values.
+ (false) traversal of the index. DynamoDB returns results reflecting
+ the requested order determined by the range key. If the data type
+ is Number, the results are returned in numeric order. For String,
+ the results are returned in order of ASCII character code values.
+ For Binary, DynamoDB treats each byte of the binary data as
+ unsigned when it compares binary values.
If ScanIndexForward is not specified, the results are returned in
ascending order.
@@ -947,12 +1487,17 @@ class DynamoDBConnection(AWSQueryConnection):
No set data types are allowed.
:type return_consumed_capacity: string
- :param return_consumed_capacity: If set to `TOTAL`, ConsumedCapacity is
- included in the response; if set to `NONE` (the default),
- ConsumedCapacity is not included.
+ :param return_consumed_capacity: If set to `TOTAL`, the response
+ includes ConsumedCapacity data for tables and indexes. If set to
+ `INDEXES`, the response includes ConsumedCapacity for indexes. If
+ set to `NONE` (the default), ConsumedCapacity is not included in
+ the response.
"""
- params = {'TableName': table_name, }
+ params = {
+ 'TableName': table_name,
+ 'KeyConditions': key_conditions,
+ }
if index_name is not None:
params['IndexName'] = index_name
if select is not None:
@@ -963,8 +1508,10 @@ class DynamoDBConnection(AWSQueryConnection):
params['Limit'] = limit
if consistent_read is not None:
params['ConsistentRead'] = consistent_read
- if key_conditions is not None:
- params['KeyConditions'] = key_conditions
+ if query_filter is not None:
+ params['QueryFilter'] = query_filter
+ if conditional_operator is not None:
+ params['ConditionalOperator'] = conditional_operator
if scan_index_forward is not None:
params['ScanIndexForward'] = scan_index_forward
if exclusive_start_key is not None:
@@ -975,14 +1522,13 @@ class DynamoDBConnection(AWSQueryConnection):
body=json.dumps(params))
def scan(self, table_name, attributes_to_get=None, limit=None,
- select=None, scan_filter=None, exclusive_start_key=None,
- return_consumed_capacity=None, total_segments=None,
- segment=None):
+ select=None, scan_filter=None, conditional_operator=None,
+ exclusive_start_key=None, return_consumed_capacity=None,
+ total_segments=None, segment=None):
"""
The Scan operation returns one or more items and item
attributes by accessing every item in the table. To have
- Amazon DynamoDB return fewer items, you can provide a
- ScanFilter .
+ DynamoDB return fewer items, you can provide a ScanFilter .
If the total number of scanned items exceeds the maximum data
set size limit of 1 MB, the scan stops and results are
@@ -1008,160 +1554,91 @@ class DynamoDBConnection(AWSQueryConnection):
retrieve. If no attribute names are specified, then all attributes
will be returned. If any of the requested attributes are not found,
they will not appear in the result.
+ Note that AttributesToGet has no effect on provisioned throughput
+ consumption. DynamoDB determines capacity units consumed based on
+ item size, not on the amount of data that is returned to an
+ application.
:type limit: integer
:param limit: The maximum number of items to evaluate (not necessarily
- the number of matching items). If Amazon DynamoDB processes the
- number of items up to the limit while processing the results, it
- stops the operation and returns the matching values up to that
- point, and a LastEvaluatedKey to apply in a subsequent operation,
- so that you can pick up where you left off. Also, if the processed
- data set size exceeds 1 MB before Amazon DynamoDB reaches this
- limit, it stops the operation and returns the matching values up to
- the limit, and a LastEvaluatedKey to apply in a subsequent
- operation to continue the operation. For more information see
- `Query and Scan`_ in the Amazon DynamoDB Developer Guide.
+ the number of matching items). If DynamoDB processes the number of
+ items up to the limit while processing the results, it stops the
+ operation and returns the matching values up to that point, and a
+ LastEvaluatedKey to apply in a subsequent operation, so that you
+ can pick up where you left off. Also, if the processed data set
+ size exceeds 1 MB before DynamoDB reaches this limit, it stops the
+ operation and returns the matching values up to the limit, and a
+ LastEvaluatedKey to apply in a subsequent operation to continue the
+ operation. For more information, see `Query and Scan`_ in the
+ Amazon DynamoDB Developer Guide.
:type select: string
:param select: The attributes to be returned in the result. You can
- retrieve all item attributes, specific item attributes, the count
- of matching items, or in the case of an index, some or all of the
- attributes projected into the index.
+ retrieve all item attributes, specific item attributes, or the
+ count of matching items.
- + `ALL_ATTRIBUTES`: Returns all of the item attributes. For a table,
- this is the default. For an index, this mode causes Amazon DynamoDB
- to fetch the full item from the table for each matching item in the
- index. If the index is configured to project all item attributes,
- the matching items will not be fetched from the table. Fetching
- items from the table incurs additional throughput cost and latency.
- + `ALL_PROJECTED_ATTRIBUTES`: Retrieves all attributes which have been
- projected into the index. If the index is configured to project all
- attributes, this is equivalent to specifying ALL_ATTRIBUTES .
+ + `ALL_ATTRIBUTES`: Returns all of the item attributes.
+ `COUNT`: Returns the number of matching items, rather than the
matching items themselves.
+ `SPECIFIC_ATTRIBUTES` : Returns only the attributes listed in
AttributesToGet . This is equivalent to specifying AttributesToGet
- without specifying any value for Select . If you are querying an
- index and request only attributes that are projected into that
- index, the operation will read only the index and not the table. If
- any of the requested attributes are not projected into the index,
- Amazon DynamoDB will need to fetch each matching item from the
- table. This extra fetching incurs additional throughput cost and
- latency.
+ without specifying any value for Select .
- When neither Select nor AttributesToGet are specified, Amazon DynamoDB
- defaults to `ALL_ATTRIBUTES` when accessing a table, and
- `ALL_PROJECTED_ATTRIBUTES` when accessing an index. You cannot use
- both Select and AttributesToGet together in a single request,
- unless the value for Select is `SPECIFIC_ATTRIBUTES`. (This usage
- is equivalent to specifying AttributesToGet without any value for
- Select .)
+ If neither Select nor AttributesToGet are specified, DynamoDB defaults
+ to `ALL_ATTRIBUTES`. You cannot use both Select and AttributesToGet
+ together in a single request, unless the value for Select is
+ `SPECIFIC_ATTRIBUTES`. (This usage is equivalent to specifying
+ AttributesToGet without any value for Select .)
:type scan_filter: map
:param scan_filter:
Evaluates the scan results and returns only the desired values.
- Multiple conditions are treated as "AND" operations: all conditions
- must be met to be included in the results.
- Each ScanConditions element consists of an attribute name to compare,
- along with the following:
+ If you specify more than one condition in the ScanFilter map, then by
+ default all of the conditions must evaluate to true. In other
+ words, the conditions are ANDed together. (You can use the
+ ConditionalOperator parameter to OR the conditions instead. If you
+ do this, then at least one of the conditions must evaluate to true,
+ rather than all of them.)
+
+ Each ScanFilter element consists of an attribute name to compare, along
+ with the following:
+ AttributeValueList - One or more values to evaluate against the
- supplied attribute. This list contains exactly one value, except
- for a `BETWEEN` or `IN` comparison, in which case the list contains
- two values. For type Number, value comparisons are numeric. String
- value comparisons for greater than, equals, or less than are based
- on ASCII character code values. For example, `a` is greater than
- `A`, and `aa` is greater than `B`. For a list of code values, see
+ supplied attribute. The number of values in the list depends on the
+ ComparisonOperator being used. For type Number, value comparisons
+ are numeric. String value comparisons for greater than, equals, or
+ less than are based on ASCII character code values. For example,
+ `a` is greater than `A`, and `aa` is greater than `B`. For a list
+ of code values, see
`http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters`_.
- For Binary, Amazon DynamoDB treats each byte of the binary data as
+ For Binary, DynamoDB treats each byte of the binary data as
unsigned when it compares binary values, for example when
- evaluating query expressions.
+ evaluating query expressions. For information on specifying data
+ types in JSON, see `JSON Data Format`_ in the Amazon DynamoDB
+ Developer Guide.
+ ComparisonOperator - A comparator for evaluating attributes. For
- example, equals, greater than, less than, etc. Valid comparison
- operators for Scan: `EQ | NE | LE | LT | GE | GT | NOT_NULL | NULL
- | CONTAINS | NOT_CONTAINS | BEGINS_WITH | IN | BETWEEN` For
- information on specifying data types in JSON, see `JSON Data
- Format`_ in the Amazon DynamoDB Developer Guide. The following are
- descriptions of each comparison operator.
+ example, equals, greater than, less than, etc. The following
+ comparison operators are available: `EQ | NE | LE | LT | GE | GT |
+ NOT_NULL | NULL | CONTAINS | NOT_CONTAINS | BEGINS_WITH | IN |
+ BETWEEN` For complete descriptions of all comparison operators, see
+ `Condition`_.
- + `EQ` : Equal. AttributeValueList can contain only one AttributeValue
- of type String, Number, or Binary (not a set). If an item contains
- an AttributeValue of a different type than the one specified in the
- request, the value does not match. For example, `{"S":"6"}` does
- not equal `{"N":"6"}`. Also, `{"N":"6"}` does not equal
- `{"NS":["6", "2", "1"]}`.
- + `NE` : Not equal. AttributeValueList can contain only one
- AttributeValue of type String, Number, or Binary (not a set). If an
- item contains an AttributeValue of a different type than the one
- specified in the request, the value does not match. For example,
- `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}` does not
- equal `{"NS":["6", "2", "1"]}`.
- + `LE` : Less than or equal. AttributeValueList can contain only one
- AttributeValue of type String, Number, or Binary (not a set). If an
- item contains an AttributeValue of a different type than the one
- specified in the request, the value does not match. For example,
- `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}` does not
- compare to `{"NS":["6", "2", "1"]}`.
- + `LT` : Less than. AttributeValueList can contain only one
- AttributeValue of type String, Number, or Binary (not a set). If an
- item contains an AttributeValue of a different type than the one
- specified in the request, the value does not match. For example,
- `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}` does not
- compare to `{"NS":["6", "2", "1"]}`.
- + `GE` : Greater than or equal. AttributeValueList can contain only one
- AttributeValue of type String, Number, or Binary (not a set). If an
- item contains an AttributeValue of a different type than the one
- specified in the request, the value does not match. For example,
- `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}` does not
- compare to `{"NS":["6", "2", "1"]}`.
- + `GT` : Greater than. AttributeValueList can contain only one
- AttributeValue of type String, Number, or Binary (not a set). If an
- item contains an AttributeValue of a different type than the one
- specified in the request, the value does not match. For example,
- `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}` does not
- compare to `{"NS":["6", "2", "1"]}`.
- + `NOT_NULL` : The attribute exists.
- + `NULL` : The attribute does not exist.
- + `CONTAINS` : checks for a subsequence, or value in a set.
- AttributeValueList can contain only one AttributeValue of type
- String, Number, or Binary (not a set). If the target attribute of
- the comparison is a String, then the operation checks for a
- substring match. If the target attribute of the comparison is
- Binary, then the operation looks for a subsequence of the target
- that matches the input. If the target attribute of the comparison
- is a set ("SS", "NS", or "BS"), then the operation checks for a
- member of the set (not as a substring).
- + `NOT_CONTAINS` : checks for absence of a subsequence, or absence of a
- value in a set. AttributeValueList can contain only one
- AttributeValue of type String, Number, or Binary (not a set). If
- the target attribute of the comparison is a String, then the
- operation checks for the absence of a substring match. If the
- target attribute of the comparison is Binary, then the operation
- checks for the absence of a subsequence of the target that matches
- the input. If the target attribute of the comparison is a set
- ("SS", "NS", or "BS"), then the operation checks for the absence of
- a member of the set (not as a substring).
- + `BEGINS_WITH` : checks for a prefix. AttributeValueList can contain
- only one AttributeValue of type String or Binary (not a Number or a
- set). The target attribute of the comparison must be a String or
- Binary (not a Number or a set).
- + `IN` : checks for exact matches. AttributeValueList can contain more
- than one AttributeValue of type String, Number, or Binary (not a
- set). The target attribute of the comparison must be of the same
- type and exact value to match. A String never matches a String set.
- + `BETWEEN` : Greater than or equal to the first value, and less than
- or equal to the second value. AttributeValueList must contain two
- AttributeValue elements of the same type, either String, Number, or
- Binary (not a set). A target attribute matches if the target value
- is greater than, or equal to, the first element and less than, or
- equal to, the second element. If an item contains an AttributeValue
- of a different type than the one specified in the request, the
- value does not match. For example, `{"S":"6"}` does not compare to
- `{"N":"6"}`. Also, `{"N":"6"}` does not compare to `{"NS":["6",
- "2", "1"]}`
+ :type conditional_operator: string
+ :param conditional_operator: A logical operator to apply to the
+ conditions in the ScanFilter map:
+
+ + `AND` - If all of the conditions evaluate to true, then the entire
+ map evaluates to true.
+      + `OR` - If at least one of the conditions evaluates to true, then the
+ entire map evaluates to true.
+
+
+ If you omit ConditionalOperator , then `AND` is the default.
+
+ The operation will succeed only if the entire map evaluates to true.
:type exclusive_start_key: map
:param exclusive_start_key: The primary key of the first item that this
@@ -1175,9 +1652,11 @@ class DynamoDBConnection(AWSQueryConnection):
corresponding value of LastEvaluatedKey .
:type return_consumed_capacity: string
- :param return_consumed_capacity: If set to `TOTAL`, ConsumedCapacity is
- included in the response; if set to `NONE` (the default),
- ConsumedCapacity is not included.
+ :param return_consumed_capacity: If set to `TOTAL`, the response
+ includes ConsumedCapacity data for tables and indexes. If set to
+ `INDEXES`, the response includes ConsumedCapacity for indexes. If
+ set to `NONE` (the default), ConsumedCapacity is not included in
+ the response.
:type total_segments: integer
:param total_segments: For a parallel Scan request, TotalSegments
@@ -1219,6 +1698,8 @@ class DynamoDBConnection(AWSQueryConnection):
params['Select'] = select
if scan_filter is not None:
params['ScanFilter'] = scan_filter
+ if conditional_operator is not None:
+ params['ConditionalOperator'] = conditional_operator
if exclusive_start_key is not None:
params['ExclusiveStartKey'] = exclusive_start_key
if return_consumed_capacity is not None:
@@ -1231,8 +1712,8 @@ class DynamoDBConnection(AWSQueryConnection):
body=json.dumps(params))
def update_item(self, table_name, key, attribute_updates=None,
- expected=None, return_values=None,
- return_consumed_capacity=None,
+ expected=None, conditional_operator=None,
+ return_values=None, return_consumed_capacity=None,
return_item_collection_metrics=None):
"""
Edits an existing item's attributes, or inserts a new item if
@@ -1250,8 +1731,12 @@ class DynamoDBConnection(AWSQueryConnection):
:param table_name: The name of the table containing the item to update.
:type key: map
- :param key: The primary key that defines the item. Each element
+ :param key: The primary key of the item to be updated. Each element
consists of an attribute name and a value for that attribute.
+ For the primary key, you must provide all of the attributes. For
+ example, with a hash type primary key, you only need to specify the
+ hash attribute. For a hash-and-range type primary key, you must
+ specify both the hash attribute and the range attribute.
:type attribute_updates: map
:param attribute_updates: The names of attributes to be modified, the
@@ -1294,16 +1779,16 @@ class DynamoDBConnection(AWSQueryConnection):
If Value is a negative number, then it is subtracted from the
existing attribute. If you use `ADD` to increment or decrement a
number value for an item that doesn't exist before the update,
- Amazon DynamoDB uses 0 as the initial value. In addition, if you
- use `ADD` to update an existing item, and intend to increment or
- decrement an attribute value which does not yet exist, Amazon
- DynamoDB uses `0` as the initial value. For example, suppose that
- the item you want to update does not yet have an attribute named
- itemcount , but you decide to `ADD` the number `3` to this
- attribute anyway, even though it currently does not exist. Amazon
- DynamoDB will create the itemcount attribute, set its initial value
- to `0`, and finally add `3` to it. The result will be a new
- itemcount attribute in the item, with a value of `3`.
+ DynamoDB uses 0 as the initial value. In addition, if you use `ADD`
+ to update an existing item, and intend to increment or decrement an
+ attribute value which does not yet exist, DynamoDB uses `0` as the
+ initial value. For example, suppose that the item you want to
+ update does not yet have an attribute named itemcount , but you
+ decide to `ADD` the number `3` to this attribute anyway, even
+ though it currently does not exist. DynamoDB will create the
+ itemcount attribute, set its initial value to `0`, and finally add
+ `3` to it. The result will be a new itemcount attribute in the
+ item, with a value of `3`.
+ If the existing data type is a set, and if the Value is also a set,
then the Value is added to the existing set. (This is a set
operation, not mathematical addition.) For example, if the
@@ -1319,13 +1804,13 @@ class DynamoDBConnection(AWSQueryConnection):
number or is a set. Do not use `ADD` for any other data types.
**If no item with the specified Key is found:**
- + `PUT` - Amazon DynamoDB creates a new item with the specified primary
- key, and then adds the attribute.
+ + `PUT` - DynamoDB creates a new item with the specified primary key,
+ and then adds the attribute.
+ `DELETE` - Nothing happens; there is no attribute to delete.
- + `ADD` - Amazon DynamoDB creates an item with the supplied primary key
- and number (or set of numbers) for the attribute value. The only
- data types allowed are number and number set; no other data types
- can be specified.
+ + `ADD` - DynamoDB creates an item with the supplied primary key and
+ number (or set of numbers) for the attribute value. The only data
+ types allowed are number and number set; no other data types can be
+ specified.
@@ -1334,48 +1819,175 @@ class DynamoDBConnection(AWSQueryConnection):
the table's attribute definition.
:type expected: map
- :param expected: A map of attribute/condition pairs. This is the
- conditional block for the UpdateItem operation. All the conditions
- must be met for the operation to succeed.
- Expected allows you to provide an attribute name, and whether or not
- Amazon DynamoDB should check to see if the attribute value already
- exists; or if the attribute value exists and has a particular value
- before changing it.
+ :param expected:
+ A map of attribute/condition pairs. This is the conditional block for
+ the UpdateItem operation.
- Each item in Expected represents an attribute name for Amazon DynamoDB
- to check, along with the following:
+ Each element of Expected consists of an attribute name, a comparison
+ operator, and one or more values. DynamoDB compares the attribute
+ with the value(s) you supplied, using the comparison operator. For
+ each Expected element, the result of the evaluation is either true
+ or false.
+
+ If you specify more than one element in the Expected map, then by
+ default all of the conditions must evaluate to true. In other
+ words, the conditions are ANDed together. (You can use the
+ ConditionalOperator parameter to OR the conditions instead. If you
+ do this, then at least one of the conditions must evaluate to true,
+ rather than all of them.)
+
+ If the Expected map evaluates to true, then the conditional operation
+ succeeds; otherwise, it fails.
+
+ Each item in Expected represents an attribute name for DynamoDB to
+ check, along with an AttributeValueList and a ComparisonOperator :
- + Value - The attribute value for Amazon DynamoDB to check.
- + Exists - Causes Amazon DynamoDB to evaluate the value before
- attempting a conditional operation:
+ + AttributeValueList - One or more values to evaluate against the
+ supplied attribute. The number of values in the list depends on the
+ ComparisonOperator being used. For type Number, value comparisons
+ are numeric. String value comparisons for greater than, equals, or
+ less than are based on ASCII character code values. For example,
+ `a` is greater than `A`, and `aa` is greater than `B`. For a list
+ of code values, see
+ `http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters`_.
+ For Binary, DynamoDB treats each byte of the binary data as
+ unsigned when it compares binary values, for example when
+ evaluating query expressions.
+ + ComparisonOperator - A comparator for evaluating attributes in the
+ AttributeValueList . When performing the comparison, DynamoDB uses
+ strongly consistent reads. The following comparison operators are
+ available: `EQ | NE | LE | LT | GE | GT | NOT_NULL | NULL |
+ CONTAINS | NOT_CONTAINS | BEGINS_WITH | IN | BETWEEN` The following
+ are descriptions of each comparison operator.
- + If Exists is `True`, Amazon DynamoDB will check to see if that
- attribute value already exists in the table. If it is found, then
- the operation succeeds. If it is not found, the operation fails
- with a ConditionalCheckFailedException .
- + If Exists is `False`, Amazon DynamoDB assumes that the attribute
- value does not exist in the table. If in fact the value does not
- exist, then the assumption is valid and the operation succeeds. If
- the value is found, despite the assumption that it does not exist,
- the operation fails with a ConditionalCheckFailedException .
- The default setting for Exists is `True`. If you supply a Value all by
- itself, Amazon DynamoDB assumes the attribute exists: You don't
- have to set Exists to `True`, because it is implied. Amazon
- DynamoDB returns a ValidationException if:
-
- + Exists is `True` but there is no Value to check. (You expect a value
- to exist, but don't specify what that value is.)
- + Exists is `False` but you also specify a Value . (You cannot expect
- an attribute to have a value, while also expecting it not to
- exist.)
+ + `EQ` : Equal. AttributeValueList can contain only one AttributeValue
+ of type String, Number, Binary, String Set, Number Set, or Binary
+ Set. If an item contains an AttributeValue of a different type than
+ the one specified in the request, the value does not match. For
+ example, `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}`
+ does not equal `{"NS":["6", "2", "1"]}`. >
+ + `NE` : Not equal. AttributeValueList can contain only one
+ AttributeValue of type String, Number, Binary, String Set, Number
+ Set, or Binary Set. If an item contains an AttributeValue of a
+ different type than the one specified in the request, the value
+ does not match. For example, `{"S":"6"}` does not equal
+ `{"N":"6"}`. Also, `{"N":"6"}` does not equal `{"NS":["6", "2",
+ "1"]}`. >
+ + `LE` : Less than or equal. AttributeValueList can contain only one
+ AttributeValue of type String, Number, or Binary (not a set). If an
+ item contains an AttributeValue of a different type than the one
+ specified in the request, the value does not match. For example,
+ `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}` does not
+ compare to `{"NS":["6", "2", "1"]}`. >
+ + `LT` : Less than. AttributeValueList can contain only one
+ AttributeValue of type String, Number, or Binary (not a set). If an
+ item contains an AttributeValue of a different type than the one
+ specified in the request, the value does not match. For example,
+ `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}` does not
+ compare to `{"NS":["6", "2", "1"]}`. >
+ + `GE` : Greater than or equal. AttributeValueList can contain only one
+ AttributeValue of type String, Number, or Binary (not a set). If an
+ item contains an AttributeValue of a different type than the one
+ specified in the request, the value does not match. For example,
+ `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}` does not
+ compare to `{"NS":["6", "2", "1"]}`. >
+ + `GT` : Greater than. AttributeValueList can contain only one
+ AttributeValue of type String, Number, or Binary (not a set). If an
+ item contains an AttributeValue of a different type than the one
+ specified in the request, the value does not match. For example,
+ `{"S":"6"}` does not equal `{"N":"6"}`. Also, `{"N":"6"}` does not
+ compare to `{"NS":["6", "2", "1"]}`. >
+ + `NOT_NULL` : The attribute exists.
+ + `NULL` : The attribute does not exist.
+ + `CONTAINS` : checks for a subsequence, or value in a set.
+ AttributeValueList can contain only one AttributeValue of type
+ String, Number, or Binary (not a set). If the target attribute of
+ the comparison is a String, then the operation checks for a
+ substring match. If the target attribute of the comparison is
+ Binary, then the operation looks for a subsequence of the target
+ that matches the input. If the target attribute of the comparison
+ is a set ("SS", "NS", or "BS"), then the operation checks for a
+ member of the set (not as a substring).
+ + `NOT_CONTAINS` : checks for absence of a subsequence, or absence of a
+ value in a set. AttributeValueList can contain only one
+ AttributeValue of type String, Number, or Binary (not a set). If
+ the target attribute of the comparison is a String, then the
+ operation checks for the absence of a substring match. If the
+ target attribute of the comparison is Binary, then the operation
+ checks for the absence of a subsequence of the target that matches
+ the input. If the target attribute of the comparison is a set
+ ("SS", "NS", or "BS"), then the operation checks for the absence of
+ a member of the set (not as a substring).
+ + `BEGINS_WITH` : checks for a prefix. AttributeValueList can contain
+ only one AttributeValue of type String or Binary (not a Number or a
+ set). The target attribute of the comparison must be a String or
+ Binary (not a Number or a set). >
+ + `IN` : checks for exact matches. AttributeValueList can contain more
+ than one AttributeValue of type String, Number, or Binary (not a
+ set). The target attribute of the comparison must be of the same
+ type and exact value to match. A String never matches a String set.
+ + `BETWEEN` : Greater than or equal to the first value, and less than
+ or equal to the second value. AttributeValueList must contain two
+ AttributeValue elements of the same type, either String, Number, or
+ Binary (not a set). A target attribute matches if the target value
+ is greater than, or equal to, the first element and less than, or
+ equal to, the second element. If an item contains an AttributeValue
+ of a different type than the one specified in the request, the
+ value does not match. For example, `{"S":"6"}` does not compare to
+ `{"N":"6"}`. Also, `{"N":"6"}` does not compare to `{"NS":["6",
+ "2", "1"]}`
- If you specify more than one condition for Exists , then all of the
- conditions must evaluate to true. (In other words, the conditions
- are ANDed together.) Otherwise, the conditional operation will
- fail.
+ For usage examples of AttributeValueList and ComparisonOperator , see
+ `Conditional Expressions`_ in the Amazon DynamoDB Developer Guide.
+
+
+ For backward compatibility with previous DynamoDB releases, the
+ following parameters can be used instead of AttributeValueList and
+ ComparisonOperator :
+
+
+ + Value - A value for DynamoDB to compare with an attribute.
+ + Exists - Causes DynamoDB to evaluate the value before attempting the
+ conditional operation:
+
+ + If Exists is `True`, DynamoDB will check to see if that attribute
+ value already exists in the table. If it is found, then the
+ condition evaluates to true; otherwise the condition evaluate to
+ false.
+ + If Exists is `False`, DynamoDB assumes that the attribute value does
+ not exist in the table. If in fact the value does not exist, then
+ the assumption is valid and the condition evaluates to true. If the
+ value is found, despite the assumption that it does not exist, the
+ condition evaluates to false.
+
+
+
+ Even though DynamoDB continues to accept the Value and Exists
+ parameters, they are now deprecated. We recommend that you use
+ AttributeValueList and ComparisonOperator instead, since they allow
+ you to construct a much wider range of conditions.
+
+ The Value and Exists parameters are incompatible with
+ AttributeValueList and ComparisonOperator . If you attempt to use
+ both sets of parameters at once, DynamoDB will throw a
+ ValidationException .
+
+ :type conditional_operator: string
+ :param conditional_operator: A logical operator to apply to the
+ conditions in the Expected map:
+
+ + `AND` - If all of the conditions evaluate to true, then the entire
+ map evaluates to true.
+ + `OR` - If at least one of the conditions evaluate to true, then the
+ entire map evaluates to true.
+
+
+ If you omit ConditionalOperator , then `AND` is the default.
+
+ The operation will succeed only if the entire map evaluates to true.
:type return_values: string
:param return_values:
@@ -1396,15 +2008,17 @@ class DynamoDBConnection(AWSQueryConnection):
returned.
:type return_consumed_capacity: string
- :param return_consumed_capacity: If set to `TOTAL`, ConsumedCapacity is
- included in the response; if set to `NONE` (the default),
- ConsumedCapacity is not included.
+ :param return_consumed_capacity: If set to `TOTAL`, the response
+ includes ConsumedCapacity data for tables and indexes. If set to
+ `INDEXES`, the response includes ConsumedCapacity for indexes. If
+ set to `NONE` (the default), ConsumedCapacity is not included in
+ the response.
:type return_item_collection_metrics: string
:param return_item_collection_metrics: If set to `SIZE`, statistics
about item collections, if any, that were modified during the
operation are returned in the response. If set to `NONE` (the
- default), no statistics are returned..
+ default), no statistics are returned.
"""
params = {'TableName': table_name, 'Key': key, }
@@ -1412,6 +2026,8 @@ class DynamoDBConnection(AWSQueryConnection):
params['AttributeUpdates'] = attribute_updates
if expected is not None:
params['Expected'] = expected
+ if conditional_operator is not None:
+ params['ConditionalOperator'] = conditional_operator
if return_values is not None:
params['ReturnValues'] = return_values
if return_consumed_capacity is not None:
@@ -1427,7 +2043,7 @@ class DynamoDBConnection(AWSQueryConnection):
Updates the provisioned throughput for the given table.
Setting the throughput for a table helps you manage
performance and is part of the provisioned throughput feature
- of Amazon DynamoDB.
+ of DynamoDB.
The provisioned throughput values can be upgraded or
downgraded based on the maximums and minimums listed in the
@@ -1442,22 +2058,23 @@ class DynamoDBConnection(AWSQueryConnection):
table returns to the `ACTIVE` state after the UpdateTable
operation.
- You cannot add, modify or delete local secondary indexes using
- UpdateTable . Local secondary indexes can only be defined at
- table creation time.
+ You cannot add, modify or delete indexes using UpdateTable .
+ Indexes can only be defined at table creation time.
:type table_name: string
:param table_name: The name of the table to be updated.
:type provisioned_throughput: dict
- :param provisioned_throughput: The provisioned throughput settings for
- the specified table. The settings can be modified using the
- UpdateTable operation.
+ :param provisioned_throughput: Represents the provisioned throughput
+ settings for a specified table or index. The settings can be
+ modified using the UpdateTable operation.
For current minimum and maximum provisioned throughput values, see
`Limits`_ in the Amazon DynamoDB Developer Guide.
:type global_secondary_index_updates: list
- :param global_secondary_index_updates:
+ :param global_secondary_index_updates: An array of one or more global
+ secondary indexes on the table, together with provisioned
+ throughput settings for each index.
"""
params = {'TableName': table_name, }
@@ -1481,7 +2098,7 @@ class DynamoDBConnection(AWSQueryConnection):
response = self._mexe(http_request, sender=None,
override_num_retries=self.NumberRetries,
retry_handler=self._retry_handler)
- response_body = response.read()
+ response_body = response.read().decode('utf-8')
boto.log.debug(response_body)
if response.status == 200:
if response_body:
@@ -1497,7 +2114,7 @@ class DynamoDBConnection(AWSQueryConnection):
status = None
boto.log.debug("Saw HTTP status: %s" % response.status)
if response.status == 400:
- response_body = response.read()
+ response_body = response.read().decode('utf-8')
boto.log.debug(response_body)
data = json.loads(response_body)
if 'ProvisionedThroughputExceededException' in data.get('__type'):
@@ -1506,7 +2123,7 @@ class DynamoDBConnection(AWSQueryConnection):
'ProvisionedThroughputExceededException',
i
)
- next_sleep = self._exponential_time(i)
+ next_sleep = self._truncated_exponential_time(i)
i += 1
status = (msg, i, next_sleep)
if i == self.NumberRetries:
@@ -1533,12 +2150,13 @@ class DynamoDBConnection(AWSQueryConnection):
if actual_crc32 != expected_crc32:
msg = ("The calculated checksum %s did not match the expected "
"checksum %s" % (actual_crc32, expected_crc32))
- status = (msg, i + 1, self._exponential_time(i))
+ status = (msg, i + 1, self._truncated_exponential_time(i))
return status
- def _exponential_time(self, i):
+ def _truncated_exponential_time(self, i):
if i == 0:
next_sleep = 0
else:
- next_sleep = 0.05 * (2 ** i)
+ next_sleep = min(0.05 * (2 ** i),
+ boto.config.get('Boto', 'max_retry_delay', 60))
return next_sleep
diff --git a/awx/lib/site-packages/boto/dynamodb2/results.py b/awx/lib/site-packages/boto/dynamodb2/results.py
index 98da8a6af7..3d80ecf317 100644
--- a/awx/lib/site-packages/boto/dynamodb2/results.py
+++ b/awx/lib/site-packages/boto/dynamodb2/results.py
@@ -52,7 +52,7 @@ class ResultSet(object):
def __iter__(self):
return self
- def next(self):
+ def __next__(self):
self._offset += 1
if self._offset >= len(self._results):
@@ -78,6 +78,8 @@ class ResultSet(object):
else:
raise StopIteration()
+ next = __next__
+
def to_call(self, the_callable, *args, **kwargs):
"""
Sets up the callable & any arguments to run it with.
@@ -106,7 +108,7 @@ class ResultSet(object):
# DDB api calls use (which limit page size, not the overall result set).
self._limit = kwargs.pop('limit', None)
- if self._limit < 0:
+ if self._limit is not None and self._limit < 0:
self._limit = None
self.the_callable = the_callable
@@ -130,7 +132,7 @@ class ResultSet(object):
# If the page size is greater than limit set them
# to the same value
- if self._limit and self._max_page_size > self._limit:
+ if self._limit and self._max_page_size and self._max_page_size > self._limit:
self._max_page_size = self._limit
# Put in the max page size.
diff --git a/awx/lib/site-packages/boto/dynamodb2/table.py b/awx/lib/site-packages/boto/dynamodb2/table.py
index 338ced19a0..79cf797537 100644
--- a/awx/lib/site-packages/boto/dynamodb2/table.py
+++ b/awx/lib/site-packages/boto/dynamodb2/table.py
@@ -7,7 +7,8 @@ from boto.dynamodb2.fields import (HashKey, RangeKey,
from boto.dynamodb2.items import Item
from boto.dynamodb2.layer1 import DynamoDBConnection
from boto.dynamodb2.results import ResultSet, BatchGetResultSet
-from boto.dynamodb2.types import Dynamizer, FILTER_OPERATORS, QUERY_OPERATORS
+from boto.dynamodb2.types import (Dynamizer, FILTER_OPERATORS, QUERY_OPERATORS,
+ STRING)
from boto.exception import JSONResponseError
@@ -232,18 +233,29 @@ class Table(object):
)
return table
- def _introspect_schema(self, raw_schema):
+ def _introspect_schema(self, raw_schema, raw_attributes=None):
"""
Given a raw schema structure back from a DynamoDB response, parse
out & build the high-level Python objects that represent them.
"""
schema = []
+ sane_attributes = {}
+
+ if raw_attributes:
+ for field in raw_attributes:
+ sane_attributes[field['AttributeName']] = field['AttributeType']
for field in raw_schema:
+ data_type = sane_attributes.get(field['AttributeName'], STRING)
+
if field['KeyType'] == 'HASH':
- schema.append(HashKey(field['AttributeName']))
+ schema.append(
+ HashKey(field['AttributeName'], data_type=data_type)
+ )
elif field['KeyType'] == 'RANGE':
- schema.append(RangeKey(field['AttributeName']))
+ schema.append(
+ RangeKey(field['AttributeName'], data_type=data_type)
+ )
else:
raise exceptions.UnknownSchemaFieldError(
"%s was seen, but is unknown. Please report this at "
@@ -280,7 +292,7 @@ class Table(object):
)
name = field['IndexName']
- kwargs['parts'] = self._introspect_schema(field['KeySchema'])
+ kwargs['parts'] = self._introspect_schema(field['KeySchema'], None)
indexes.append(index_klass(name, **kwargs))
return indexes
@@ -319,7 +331,8 @@ class Table(object):
if not self.schema:
# Since we have the data, build the schema.
raw_schema = result['Table'].get('KeySchema', [])
- self.schema = self._introspect_schema(raw_schema)
+ raw_attributes = result['Table'].get('AttributeDefinitions', [])
+ self.schema = self._introspect_schema(raw_schema, raw_attributes)
if not self.indexes:
# Build the index information as well.
@@ -635,16 +648,35 @@ class Table(object):
self.connection.update_item(self.table_name, raw_key, item_data, **kwargs)
return True
- def delete_item(self, **kwargs):
+ def delete_item(self, expected=None, conditional_operator=None, **kwargs):
"""
- Deletes an item in DynamoDB.
+ Deletes a single item. You can perform a conditional delete operation
+ that deletes the item if it exists, or if it has an expected attribute
+ value.
+
+ Conditional deletes are useful for only deleting items if specific
+ conditions are met. If those conditions are met, DynamoDB performs
+ the delete. Otherwise, the item is not deleted.
+
+ To specify the expected attribute values of the item, you can pass a
+ dictionary of conditions to ``expected``. Each condition should follow
+ the pattern ``__=``.
**IMPORTANT** - Be careful when using this method, there is no undo.
To specify the key of the item you'd like to get, you can specify the
key attributes as kwargs.
- Returns ``True`` on success.
+ Optionally accepts an ``expected`` parameter which is a dictionary of
+ expected attribute value conditions.
+
+ Optionally accepts a ``conditional_operator`` which applies to the
+ expected attribute value conditions:
+
+ + `AND` - If all of the conditions evaluate to true (default)
+ + `OR` - True if at least one condition evaluates to true
+
+ Returns ``True`` on success, ``False`` on failed conditional delete.
Example::
@@ -663,9 +695,21 @@ class Table(object):
... })
True
+ # Conditional delete
+ >>> users.delete_item(username='johndoe',
+ ... expected={'balance__eq': 0})
+ True
"""
+ expected = self._build_filters(expected, using=FILTER_OPERATORS)
raw_key = self._encode_keys(kwargs)
- self.connection.delete_item(self.table_name, raw_key)
+
+ try:
+ self.connection.delete_item(self.table_name, raw_key,
+ expected=expected,
+ conditional_operator=conditional_operator)
+ except exceptions.ConditionalCheckFailedException:
+ return False
+
return True
def get_key_fields(self):
@@ -744,6 +788,9 @@ class Table(object):
An internal method for taking query/scan-style ``**kwargs`` & turning
them into the raw structure DynamoDB expects for filtering.
"""
+ if filter_kwargs is None:
+ return
+
filters = {}
for field_and_op, value in filter_kwargs.items():
@@ -803,17 +850,34 @@ class Table(object):
def query(self, limit=None, index=None, reverse=False, consistent=False,
attributes=None, max_page_size=None, **filter_kwargs):
"""
+ **WARNING:** This method is provided **strictly** for
+ backward-compatibility. It returns results in an incorrect order.
+
+ If you are writing new code, please use ``Table.query_2``.
+ """
+ reverse = not reverse
+ return self.query_2(limit=limit, index=index, reverse=reverse,
+ consistent=consistent, attributes=attributes,
+ max_page_size=max_page_size, **filter_kwargs)
+
+ def query_2(self, limit=None, index=None, reverse=False,
+ consistent=False, attributes=None, max_page_size=None,
+ query_filter=None, conditional_operator=None,
+ **filter_kwargs):
+ """
Queries for a set of matching items in a DynamoDB table.
Queries can be performed against a hash key, a hash+range key or
- against any data stored in your local secondary indexes.
+ against any data stored in your local secondary indexes. Query filters
+ can be used to filter on arbitrary fields.
**Note** - You can not query against arbitrary fields within the data
- stored in DynamoDB.
+ stored in DynamoDB unless you specify ``query_filter`` values.
To specify the filters of the items you'd like to get, you can specify
the filters as kwargs. Each filter kwarg should follow the pattern
- ``__=``.
+ ``__=``. Query filters
+ are specified in the same way.
Optionally accepts a ``limit`` parameter, which should be an integer
count of the total number of items to return. (Default: ``None`` -
@@ -824,7 +888,7 @@ class Table(object):
(Default: ``None``)
Optionally accepts a ``reverse`` parameter, which will present the
- results in reverse order. (Default: ``None`` - normal order)
+ results in reverse order. (Default: ``False`` - normal order)
Optionally accepts a ``consistent`` parameter, which should be a
boolean. If you provide ``True``, it will force a consistent read of
@@ -842,6 +906,15 @@ class Table(object):
the scan from drowning out other queries. (Default: ``None`` -
fetch as many as DynamoDB will return)
+ Optionally accepts a ``query_filter`` which is a dictionary of filter
+ conditions against any arbitrary field in the returned data.
+
+ Optionally accepts a ``conditional_operator`` which applies to the
+ query filter conditions:
+
+ + `AND` - True if all filter conditions evaluate to true (default)
+ + `OR` - True if at least one filter condition evaluates to true
+
Returns a ``ResultSet``, which transparently handles the pagination of
results you get back.
@@ -880,6 +953,18 @@ class Table(object):
'John'
'Fred'
+ # Filter by non-indexed field(s)
+ >>> results = users.query(
+ ... last_name__eq='Doe',
+ ... reverse=True,
+ ... query_filter={
+ ... 'first_name__beginswith': 'A'
+ ... }
+ ... )
+ >>> for res in results:
+ ... print res['first_name'] + ' ' + res['last_name']
+ 'Alice Doe'
+
"""
if self.schema:
if len(self.schema) == 1:
@@ -908,20 +993,26 @@ class Table(object):
'consistent': consistent,
'select': select,
'attributes_to_get': attributes,
+ 'query_filter': query_filter,
+ 'conditional_operator': conditional_operator,
})
results.to_call(self._query, **kwargs)
return results
- def query_count(self, index=None, consistent=False, **filter_kwargs):
+ def query_count(self, index=None, consistent=False, conditional_operator=None,
+ query_filter=None, scan_index_forward=True, limit=None,
+ **filter_kwargs):
"""
Queries the exact count of matching items in a DynamoDB table.
Queries can be performed against a hash key, a hash+range key or
- against any data stored in your local secondary indexes.
+ against any data stored in your local secondary indexes. Query filters
+ can be used to filter on arbitrary fields.
To specify the filters of the items you'd like to get, you can specify
the filters as kwargs. Each filter kwarg should follow the pattern
- ``__=``.
+ ``__=``. Query filters
+ are specified in the same way.
Optionally accepts an ``index`` parameter, which should be a string of
name of the local secondary index you want to query against.
@@ -932,9 +1023,34 @@ class Table(object):
the data (more expensive). (Default: ``False`` - use eventually
consistent reads)
+ Optionally accepts a ``query_filter`` which is a dictionary of filter
+ conditions against any arbitrary field in the returned data.
+
+ Optionally accepts a ``conditional_operator`` which applies to the
+ query filter conditions:
+
+ + `AND` - True if all filter conditions evaluate to true (default)
+ + `OR` - True if at least one filter condition evaluates to true
+
Returns an integer which represents the exact amount of matched
items.
+ :type scan_index_forward: boolean
+ :param scan_index_forward: Specifies ascending (true) or descending
+ (false) traversal of the index. DynamoDB returns results reflecting
+ the requested order determined by the range key. If the data type
+ is Number, the results are returned in numeric order. For String,
+ the results are returned in order of ASCII character code values.
+ For Binary, DynamoDB treats each byte of the binary data as
+ unsigned when it compares binary values.
+
+ If ScanIndexForward is not specified, the results are returned in
+ ascending order.
+
+ :type limit: integer
+ :param limit: The maximum number of items to evaluate (not necessarily
+ the number of matching items).
+
Example::
# Look for last names equal to "Doe".
@@ -956,18 +1072,27 @@ class Table(object):
using=QUERY_OPERATORS
)
+ built_query_filter = self._build_filters(
+ query_filter,
+ using=FILTER_OPERATORS
+ )
+
raw_results = self.connection.query(
self.table_name,
index_name=index,
consistent_read=consistent,
select='COUNT',
key_conditions=key_conditions,
+ query_filter=built_query_filter,
+ conditional_operator=conditional_operator,
+ limit=limit,
+ scan_index_forward=scan_index_forward,
)
return int(raw_results.get('Count', 0))
def _query(self, limit=None, index=None, reverse=False, consistent=False,
exclusive_start_key=None, select=None, attributes_to_get=None,
- **filter_kwargs):
+ query_filter=None, conditional_operator=None, **filter_kwargs):
"""
The internal method that performs the actual queries. Used extensively
by ``ResultSet`` to perform each (paginated) request.
@@ -975,12 +1100,15 @@ class Table(object):
kwargs = {
'limit': limit,
'index_name': index,
- 'scan_index_forward': reverse,
'consistent_read': consistent,
'select': select,
- 'attributes_to_get': attributes_to_get
+ 'attributes_to_get': attributes_to_get,
+ 'conditional_operator': conditional_operator,
}
+ if reverse:
+ kwargs['scan_index_forward'] = False
+
if exclusive_start_key:
kwargs['exclusive_start_key'] = {}
@@ -994,6 +1122,11 @@ class Table(object):
using=QUERY_OPERATORS
)
+ kwargs['query_filter'] = self._build_filters(
+ query_filter,
+ using=FILTER_OPERATORS
+ )
+
raw_results = self.connection.query(
self.table_name,
**kwargs
@@ -1020,13 +1153,14 @@ class Table(object):
}
def scan(self, limit=None, segment=None, total_segments=None,
- max_page_size=None, attributes=None, **filter_kwargs):
+ max_page_size=None, attributes=None, conditional_operator=None,
+ **filter_kwargs):
"""
Scans across all items within a DynamoDB table.
Scans can be performed against a hash key or a hash+range key. You can
additionally filter the results after the table has been read but
- before the response is returned.
+ before the response is returned by using query filters.
To specify the filters of the items you'd like to get, you can specify
the filters as kwargs. Each filter kwarg should follow the pattern
@@ -1091,12 +1225,14 @@ class Table(object):
'segment': segment,
'total_segments': total_segments,
'attributes': attributes,
+ 'conditional_operator': conditional_operator,
})
results.to_call(self._scan, **kwargs)
return results
def _scan(self, limit=None, exclusive_start_key=None, segment=None,
- total_segments=None, attributes=None, **filter_kwargs):
+ total_segments=None, attributes=None, conditional_operator=None,
+ **filter_kwargs):
"""
The internal method that performs the actual scan. Used extensively
by ``ResultSet`` to perform each (paginated) request.
@@ -1106,6 +1242,7 @@ class Table(object):
'segment': segment,
'total_segments': total_segments,
'attributes_to_get': attributes,
+ 'conditional_operator': conditional_operator,
}
if exclusive_start_key:
@@ -1146,7 +1283,7 @@ class Table(object):
'last_key': last_key,
}
- def batch_get(self, keys, consistent=False):
+ def batch_get(self, keys, consistent=False, attributes=None):
"""
Fetches many specific items in batch from a table.
@@ -1157,6 +1294,10 @@ class Table(object):
boolean. If you provide ``True``, a strongly consistent read will be
used. (Default: False)
+ Optionally accepts an ``attributes`` parameter, which should be a
+ tuple. If you provide any attributes only these will be fetched
+ from DynamoDB.
+
Returns a ``ResultSet``, which transparently handles the pagination of
results you get back.
@@ -1183,10 +1324,10 @@ class Table(object):
# We pass the keys to the constructor instead, so it can maintain it's
# own internal state as to what keys have been processed.
results = BatchGetResultSet(keys=keys, max_batch_get=self.max_batch_get)
- results.to_call(self._batch_get, consistent=False)
+ results.to_call(self._batch_get, consistent=consistent, attributes=attributes)
return results
- def _batch_get(self, keys, consistent=False):
+ def _batch_get(self, keys, consistent=False, attributes=None):
"""
The internal method that performs the actual batch get. Used extensively
by ``BatchGetResultSet`` to perform each (paginated) request.
@@ -1200,6 +1341,9 @@ class Table(object):
if consistent:
items[self.table_name]['ConsistentRead'] = True
+ if attributes is not None:
+ items[self.table_name]['AttributesToGet'] = attributes
+
for key_data in keys:
raw_key = {}
diff --git a/awx/lib/site-packages/boto/ec2/autoscale/__init__.py b/awx/lib/site-packages/boto/ec2/autoscale/__init__.py
index 2a0f692727..fc0534b421 100644
--- a/awx/lib/site-packages/boto/ec2/autoscale/__init__.py
+++ b/awx/lib/site-packages/boto/ec2/autoscale/__init__.py
@@ -45,6 +45,7 @@ from boto.ec2.autoscale.instance import Instance
from boto.ec2.autoscale.scheduled import ScheduledUpdateGroupAction
from boto.ec2.autoscale.tag import Tag
from boto.ec2.autoscale.limits import AccountLimits
+from boto.compat import six
RegionData = load_regions().get('autoscaling', {})
@@ -134,15 +135,15 @@ class AutoScaleConnection(AWSQueryConnection):
['us-east-1b',...]
"""
# different from EC2 list params
- for i in xrange(1, len(items) + 1):
+ for i in range(1, len(items) + 1):
if isinstance(items[i - 1], dict):
- for k, v in items[i - 1].iteritems():
+ for k, v in six.iteritems(items[i - 1]):
if isinstance(v, dict):
- for kk, vv in v.iteritems():
+ for kk, vv in six.iteritems(v):
params['%s.member.%d.%s.%s' % (label, i, k, kk)] = vv
else:
params['%s.member.%d.%s' % (label, i, k)] = v
- elif isinstance(items[i - 1], basestring):
+ elif isinstance(items[i - 1], six.string_types):
params['%s.member.%d' % (label, i)] = items[i - 1]
def _update_group(self, op, as_group):
@@ -221,7 +222,7 @@ class AutoScaleConnection(AWSQueryConnection):
if launch_config.key_name:
params['KeyName'] = launch_config.key_name
if launch_config.user_data:
- params['UserData'] = base64.b64encode(launch_config.user_data)
+ params['UserData'] = base64.b64encode(launch_config.user_data).decode('utf-8')
if launch_config.kernel_id:
params['KernelId'] = launch_config.kernel_id
if launch_config.ramdisk_id:
diff --git a/awx/lib/site-packages/boto/ec2/buyreservation.py b/awx/lib/site-packages/boto/ec2/buyreservation.py
index fcd8a77c81..7a0e6751d8 100644
--- a/awx/lib/site-packages/boto/ec2/buyreservation.py
+++ b/awx/lib/site-packages/boto/ec2/buyreservation.py
@@ -18,7 +18,6 @@
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-
import boto.ec2
from boto.sdb.db.property import StringProperty, IntegerProperty
from boto.manage import propget
@@ -66,19 +65,19 @@ if __name__ == "__main__":
obj.get(params)
offerings = obj.ec2.get_all_reserved_instances_offerings(instance_type=params['instance_type'],
availability_zone=params['zone'].name)
- print '\nThe following Reserved Instances Offerings are available:\n'
+ print('\nThe following Reserved Instances Offerings are available:\n')
for offering in offerings:
offering.describe()
prop = StringProperty(name='offering', verbose_name='Offering',
choices=offerings)
offering = propget.get(prop)
- print '\nYou have chosen this offering:'
+ print('\nYou have chosen this offering:')
offering.describe()
unit_price = float(offering.fixed_price)
total_price = unit_price * params['quantity']
- print '!!! You are about to purchase %d of these offerings for a total of $%.2f !!!' % (params['quantity'], total_price)
+ print('!!! You are about to purchase %d of these offerings for a total of $%.2f !!!' % (params['quantity'], total_price))
answer = raw_input('Are you sure you want to do this? If so, enter YES: ')
if answer.strip().lower() == 'yes':
offering.purchase(params['quantity'])
else:
- print 'Purchase cancelled'
+ print('Purchase cancelled')
diff --git a/awx/lib/site-packages/boto/ec2/cloudwatch/__init__.py b/awx/lib/site-packages/boto/ec2/cloudwatch/__init__.py
index ba3376b131..0c9115e8b6 100644
--- a/awx/lib/site-packages/boto/ec2/cloudwatch/__init__.py
+++ b/awx/lib/site-packages/boto/ec2/cloudwatch/__init__.py
@@ -23,7 +23,7 @@
This module provides an interface to the Elastic Compute Cloud (EC2)
CloudWatch service from AWS.
"""
-from boto.compat import json
+from boto.compat import json, map, six, zip
from boto.connection import AWSQueryConnection
from boto.ec2.cloudwatch.metric import Metric
from boto.ec2.cloudwatch.alarm import MetricAlarm, MetricAlarms, AlarmHistoryItem
@@ -110,7 +110,7 @@ class CloudWatchConnection(AWSQueryConnection):
for dim_name in dimension:
dim_value = dimension[dim_name]
if dim_value:
- if isinstance(dim_value, basestring):
+ if isinstance(dim_value, six.string_types):
dim_value = [dim_value]
for value in dim_value:
params['%s.%d.Name' % (prefix, i+1)] = dim_name
@@ -121,12 +121,12 @@ class CloudWatchConnection(AWSQueryConnection):
i += 1
def build_list_params(self, params, items, label):
- if isinstance(items, basestring):
+ if isinstance(items, six.string_types):
items = [items]
for index, item in enumerate(items):
i = index + 1
if isinstance(item, dict):
- for k, v in item.iteritems():
+ for k, v in six.iteritems(item):
params[label % (i, 'Name')] = k
if v is not None:
params[label % (i, 'Value')] = v
@@ -171,7 +171,7 @@ class CloudWatchConnection(AWSQueryConnection):
else:
raise Exception('Must specify a value or statistics to put.')
- for key, val in metric_data.iteritems():
+ for key, val in six.iteritems(metric_data):
params['MetricData.member.%d.%s' % (index + 1, key)] = val
def get_metric_statistics(self, period, start_time, end_time, metric_name,
diff --git a/awx/lib/site-packages/boto/ec2/cloudwatch/alarm.py b/awx/lib/site-packages/boto/ec2/cloudwatch/alarm.py
index 9215eb08c9..4989fb34a5 100644
--- a/awx/lib/site-packages/boto/ec2/cloudwatch/alarm.py
+++ b/awx/lib/site-packages/boto/ec2/cloudwatch/alarm.py
@@ -21,10 +21,10 @@
#
from datetime import datetime
-from boto.resultset import ResultSet
from boto.ec2.cloudwatch.listelement import ListElement
from boto.ec2.cloudwatch.dimension import Dimension
from boto.compat import json
+from boto.compat import six
class MetricAlarms(list):
@@ -57,7 +57,7 @@ class MetricAlarm(object):
'<': 'LessThanThreshold',
'<=': 'LessThanOrEqualToThreshold',
}
- _rev_cmp_map = dict((v, k) for (k, v) in _cmp_map.iteritems())
+ _rev_cmp_map = dict((v, k) for (k, v) in six.iteritems(_cmp_map))
def __init__(self, connection=None, name=None, metric=None,
namespace=None, statistic=None, comparison=None,
@@ -252,11 +252,11 @@ class MetricAlarm(object):
def add_alarm_action(self, action_arn=None):
"""
- Adds an alarm action, represented as an SNS topic, to this alarm.
+ Adds an alarm action, represented as an SNS topic, to this alarm.
What do do when alarm is triggered.
:type action_arn: str
- :param action_arn: SNS topics to which notification should be
+ :param action_arn: SNS topics to which notification should be
sent if the alarm goes to state ALARM.
"""
if not action_arn:
@@ -270,21 +270,21 @@ class MetricAlarm(object):
this alarm. What to do when the insufficient_data state is reached.
:type action_arn: str
- :param action_arn: SNS topics to which notification should be
+ :param action_arn: SNS topics to which notification should be
sent if the alarm goes to state INSUFFICIENT_DATA.
"""
if not action_arn:
return
self.actions_enabled = 'true'
self.insufficient_data_actions.append(action_arn)
-
+
def add_ok_action(self, action_arn=None):
"""
Adds an ok action, represented as an SNS topic, to this alarm. What
to do when the ok state is reached.
:type action_arn: str
- :param action_arn: SNS topics to which notification should be
+ :param action_arn: SNS topics to which notification should be
sent if the alarm goes to state INSUFFICIENT_DATA.
"""
if not action_arn:
@@ -320,4 +320,3 @@ class AlarmHistoryItem(object):
'%Y-%m-%dT%H:%M:%S.%fZ')
except ValueError:
self.timestamp = datetime.strptime(value, '%Y-%m-%dT%H:%M:%SZ')
-
diff --git a/awx/lib/site-packages/boto/ec2/cloudwatch/datapoint.py b/awx/lib/site-packages/boto/ec2/cloudwatch/datapoint.py
index d4350cef73..a33771a15e 100644
--- a/awx/lib/site-packages/boto/ec2/cloudwatch/datapoint.py
+++ b/awx/lib/site-packages/boto/ec2/cloudwatch/datapoint.py
@@ -37,4 +37,3 @@ class Datapoint(dict):
self[name] = datetime.strptime(value, '%Y-%m-%dT%H:%M:%SZ')
elif name != 'member':
self[name] = value
-
diff --git a/awx/lib/site-packages/boto/ec2/cloudwatch/dimension.py b/awx/lib/site-packages/boto/ec2/cloudwatch/dimension.py
index 42c8a88046..86ebb9c3f4 100644
--- a/awx/lib/site-packages/boto/ec2/cloudwatch/dimension.py
+++ b/awx/lib/site-packages/boto/ec2/cloudwatch/dimension.py
@@ -35,4 +35,3 @@ class Dimension(dict):
self[self._name] = [value]
else:
setattr(self, name, value)
-
diff --git a/awx/lib/site-packages/boto/ec2/cloudwatch/listelement.py b/awx/lib/site-packages/boto/ec2/cloudwatch/listelement.py
index 5be45992a0..2dd9cef034 100644
--- a/awx/lib/site-packages/boto/ec2/cloudwatch/listelement.py
+++ b/awx/lib/site-packages/boto/ec2/cloudwatch/listelement.py
@@ -27,5 +27,3 @@ class ListElement(list):
def endElement(self, name, value, connection):
if name == 'member':
self.append(value)
-
-
diff --git a/awx/lib/site-packages/boto/ec2/connection.py b/awx/lib/site-packages/boto/ec2/connection.py
index 9e2d4b1925..4fa205b66e 100644
--- a/awx/lib/site-packages/boto/ec2/connection.py
+++ b/awx/lib/site-packages/boto/ec2/connection.py
@@ -65,13 +65,14 @@ from boto.ec2.networkinterface import NetworkInterface
from boto.ec2.attributes import AccountAttribute, VPCAttribute
from boto.ec2.blockdevicemapping import BlockDeviceMapping, BlockDeviceType
from boto.exception import EC2ResponseError
+from boto.compat import six
#boto.set_stream_logger('ec2')
class EC2Connection(AWSQueryConnection):
- APIVersion = boto.config.get('Boto', 'ec2_version', '2013-10-15')
+ APIVersion = boto.config.get('Boto', 'ec2_version', '2014-05-01')
DefaultRegionName = boto.config.get('Boto', 'ec2_region_name', 'us-east-1')
DefaultRegionEndpoint = boto.config.get('Boto', 'ec2_region_endpoint',
'ec2.us-east-1.amazonaws.com')
@@ -109,7 +110,7 @@ class EC2Connection(AWSQueryConnection):
def get_params(self):
"""
- Returns a dictionary containing the value of of all of the keyword
+ Returns a dictionary containing the value of all of the keyword
arguments passed when constructing this connection.
"""
param_names = ['aws_access_key_id', 'aws_secret_access_key',
@@ -122,6 +123,9 @@ class EC2Connection(AWSQueryConnection):
return params
def build_filter_params(self, params, filters):
+ if not isinstance(filters, dict):
+ filters = dict(filters)
+
i = 1
for name in filters:
aws_name = name
@@ -266,7 +270,8 @@ class EC2Connection(AWSQueryConnection):
root_device_name=None, block_device_map=None,
dry_run=False, virtualization_type=None,
sriov_net_support=None,
- snapshot_id=None):
+ snapshot_id=None,
+ delete_root_volume_on_termination=False):
"""
Register an image.
@@ -315,6 +320,12 @@ class EC2Connection(AWSQueryConnection):
as root device for the image. Mutually exclusive with
block_device_map, requires root_device_name
+ :type delete_root_volume_on_termination: bool
+ :param delete_root_volume_on_termination: Whether to delete the root
+ volume of the image after instance termination. Only applies when
+ creating image from snapshot_id. Defaults to False. Note that
+ leaving volumes behind after instance termination is not free.
+
:rtype: string
:return: The new image id
"""
@@ -334,7 +345,8 @@ class EC2Connection(AWSQueryConnection):
if root_device_name:
params['RootDeviceName'] = root_device_name
if snapshot_id:
- root_vol = BlockDeviceType(snapshot_id=snapshot_id)
+ root_vol = BlockDeviceType(snapshot_id=snapshot_id,
+ delete_on_termination=delete_root_volume_on_termination)
block_device_map = BlockDeviceMapping()
block_device_map[root_device_name] = root_vol
if block_device_map:
@@ -602,15 +614,24 @@ class EC2Connection(AWSQueryConnection):
:rtype: list
:return: A list of :class:`boto.ec2.instance.Instance`
"""
- reservations = self.get_all_reservations(instance_ids=instance_ids,
- filters=filters,
- dry_run=dry_run,
- max_results=max_results)
- return [instance for reservation in reservations
- for instance in reservation.instances]
+ next_token = None
+ retval = []
+ while True:
+ reservations = self.get_all_reservations(instance_ids=instance_ids,
+ filters=filters,
+ dry_run=dry_run,
+ max_results=max_results,
+ next_token=next_token)
+ retval.extend([instance for reservation in reservations for
+ instance in reservation.instances])
+ next_token = reservations.next_token
+ if not next_token:
+ break
+
+ return retval
def get_all_reservations(self, instance_ids=None, filters=None,
- dry_run=False, max_results=None):
+ dry_run=False, max_results=None, next_token=None):
"""
Retrieve all the instance reservations associated with your account.
@@ -632,6 +653,10 @@ class EC2Connection(AWSQueryConnection):
:param max_results: The maximum number of paginated instance
items per response.
+ :type next_token: str
+ :param next_token: A string specifying the next paginated set
+ of results to return.
+
:rtype: list
:return: A list of :class:`boto.ec2.instance.Reservation`
"""
@@ -652,12 +677,15 @@ class EC2Connection(AWSQueryConnection):
params['DryRun'] = 'true'
if max_results is not None:
params['MaxResults'] = max_results
+ if next_token:
+ params['NextToken'] = next_token
return self.get_list('DescribeInstances', params,
[('item', Reservation)], verb='POST')
def get_all_instance_status(self, instance_ids=None,
max_results=None, next_token=None,
- filters=None, dry_run=False):
+ filters=None, dry_run=False,
+ include_all_instances=False):
"""
Retrieve all the instances in your account scheduled for maintenance.
@@ -685,6 +713,11 @@ class EC2Connection(AWSQueryConnection):
:type dry_run: bool
:param dry_run: Set to True if the operation should not actually run.
+ :type include_all_instances: bool
+ :param include_all_instances: Set to True if all
+ instances should be returned. (Only running
+ instances are included by default.)
+
:rtype: list
:return: A list of instances that have maintenance scheduled.
"""
@@ -699,6 +732,8 @@ class EC2Connection(AWSQueryConnection):
self.build_filter_params(params, filters)
if dry_run:
params['DryRun'] = 'true'
+ if include_all_instances:
+ params['IncludeAllInstances'] = 'true'
return self.get_object('DescribeInstanceStatus', params,
InstanceStatusSet, verb='POST')
@@ -775,6 +810,9 @@ class EC2Connection(AWSQueryConnection):
* i2.2xlarge
* i2.4xlarge
* i2.8xlarge
+ * t2.micro
+ * t2.small
+ * t2.medium
:type placement: string
:param placement: The Availability Zone to launch the instance into.
@@ -788,7 +826,7 @@ class EC2Connection(AWSQueryConnection):
instances.
:type monitoring_enabled: bool
- :param monitoring_enabled: Enable CloudWatch monitoring on
+ :param monitoring_enabled: Enable detailed CloudWatch monitoring on
the instance.
:type subnet_id: string
@@ -856,9 +894,9 @@ class EC2Connection(AWSQueryConnection):
provide optimal EBS I/O performance. This optimization
isn't available with all instance types.
- :type network_interfaces: list
- :param network_interfaces: A list of
- :class:`boto.ec2.networkinterface.NetworkInterfaceSpecification`
+ :type network_interfaces: :class:`boto.ec2.networkinterface.NetworkInterfaceCollection`
+ :param network_interfaces: A NetworkInterfaceCollection data
+ structure containing the ENI specifications for the instance.
:type dry_run: bool
:param dry_run: Set to True if the operation should not actually run.
@@ -889,7 +927,9 @@ class EC2Connection(AWSQueryConnection):
l.append(group)
self.build_list_params(params, l, 'SecurityGroup')
if user_data:
- params['UserData'] = base64.b64encode(user_data)
+ if isinstance(user_data, six.text_type):
+ user_data = user_data.encode('utf-8')
+ params['UserData'] = base64.b64encode(user_data).decode('utf-8')
if addressing_type:
params['AddressingType'] = addressing_type
if instance_type:
@@ -1470,6 +1510,9 @@ class EC2Connection(AWSQueryConnection):
* i2.2xlarge
* i2.4xlarge
* i2.8xlarge
+ * t2.micro
+ * t2.small
+ * t2.medium
:type placement: string
:param placement: The availability zone in which to launch
@@ -1484,7 +1527,7 @@ class EC2Connection(AWSQueryConnection):
instances
:type monitoring_enabled: bool
- :param monitoring_enabled: Enable CloudWatch monitoring on
+ :param monitoring_enabled: Enable detailed CloudWatch monitoring on
the instance.
:type subnet_id: string
@@ -2223,8 +2266,8 @@ class EC2Connection(AWSQueryConnection):
params['DryRun'] = 'true'
return self.get_status('ModifyVolumeAttribute', params, verb='POST')
- def create_volume(self, size, zone, snapshot=None,
- volume_type=None, iops=None, dry_run=False):
+ def create_volume(self, size, zone, snapshot=None, volume_type=None,
+ iops=None, encrypted=False, dry_run=False):
"""
Create a new EBS Volume.
@@ -2240,12 +2283,16 @@ class EC2Connection(AWSQueryConnection):
:type volume_type: string
:param volume_type: The type of the volume. (optional). Valid
- values are: standard | io1.
+ values are: standard | io1 | gp2.
:type iops: int
- :param iops: The provisioned IOPs you want to associate with
+ :param iops: The provisioned IOPS you want to associate with
this volume. (optional)
+ :type encrypted: bool
+ :param encrypted: Specifies whether the volume should be encrypted.
+ (optional)
+
:type dry_run: bool
:param dry_run: Set to True if the operation should not actually run.
@@ -2263,6 +2310,8 @@ class EC2Connection(AWSQueryConnection):
params['VolumeType'] = volume_type
if iops:
params['Iops'] = str(iops)
+ if encrypted:
+ params['Encrypted'] = 'true'
if dry_run:
params['DryRun'] = 'true'
return self.get_object('CreateVolume', params, Volume, verb='POST')
@@ -2790,7 +2839,7 @@ class EC2Connection(AWSQueryConnection):
keynames=[keyname],
dry_run=dry_run
)[0]
- except self.ResponseError, e:
+ except self.ResponseError as e:
if e.code == 'InvalidKeyPair.NotFound':
return None
else:
@@ -3819,7 +3868,7 @@ class EC2Connection(AWSQueryConnection):
def monitor_instances(self, instance_ids, dry_run=False):
"""
- Enable CloudWatch monitoring for the supplied instances.
+ Enable detailed CloudWatch monitoring for the supplied instances.
:type instance_id: list of strings
:param instance_id: The instance ids
@@ -3840,7 +3889,7 @@ class EC2Connection(AWSQueryConnection):
def monitor_instance(self, instance_id, dry_run=False):
"""
Deprecated Version, maintained for backward compatibility.
- Enable CloudWatch monitoring for the supplied instance.
+ Enable detailed CloudWatch monitoring for the supplied instance.
:type instance_id: string
:param instance_id: The instance id
@@ -3876,7 +3925,7 @@ class EC2Connection(AWSQueryConnection):
def unmonitor_instance(self, instance_id, dry_run=False):
"""
Deprecated Version, maintained for backward compatibility.
- Disable CloudWatch monitoring for the supplied instance.
+ Disable detailed CloudWatch monitoring for the supplied instance.
:type instance_id: string
:param instance_id: The instance id
@@ -4184,11 +4233,14 @@ class EC2Connection(AWSQueryConnection):
# Network Interface methods
- def get_all_network_interfaces(self, filters=None, dry_run=False):
+ def get_all_network_interfaces(self, network_interface_ids=None, filters=None, dry_run=False):
"""
Retrieve all of the Elastic Network Interfaces (ENI's)
associated with your account.
+ :type network_interface_ids: list
+ :param network_interface_ids: a list of strings representing ENI IDs
+
:type filters: dict
:param filters: Optional filters that can be used to limit
the results returned. Filters are provided
@@ -4206,6 +4258,8 @@ class EC2Connection(AWSQueryConnection):
:return: A list of :class:`boto.ec2.networkinterface.NetworkInterface`
"""
params = {}
+ if network_interface_ids:
+ self.build_list_params(params, network_interface_ids, 'NetworkInterfaceId')
if filters:
self.build_filter_params(params, filters)
if dry_run:
@@ -4339,7 +4393,8 @@ class EC2Connection(AWSQueryConnection):
"""
:type dry_run: bool
:param dry_run: Set to True if the operation should not actually run.
-
+ :rtype: :class:`boto.ec2.image.CopyImage`
+ :return: Object containing the image_id of the copied image.
"""
params = {
'SourceRegion': source_region,
diff --git a/awx/lib/site-packages/boto/ec2/ec2object.py b/awx/lib/site-packages/boto/ec2/ec2object.py
index f697e66483..383602e56f 100644
--- a/awx/lib/site-packages/boto/ec2/ec2object.py
+++ b/awx/lib/site-packages/boto/ec2/ec2object.py
@@ -85,6 +85,27 @@ class TaggedEC2Object(EC2Object):
self.tags = TagSet()
self.tags[key] = value
+ def add_tags(self, tags, dry_run=False):
+ """
+ Add tags to this object. Tags are stored by AWS and can be used
+ to organize and filter resources. Adding tags involves a round-trip
+ to the EC2 service.
+
+ :type tags: dict
+ :param tags: A dictionary of key-value pairs for the tags being stored.
+ If for some tags you want only the name and no value, the
+ corresponding value for that tag name should be an empty
+ string.
+ """
+ status = self.connection.create_tags(
+ [self.id],
+ tags,
+ dry_run=dry_run
+ )
+ if self.tags is None:
+ self.tags = TagSet()
+ self.tags.update(tags)
+
def remove_tag(self, key, value=None, dry_run=False):
"""
Remove a tag from this object. Removing a tag involves a round-trip
@@ -102,7 +123,7 @@ class TaggedEC2Object(EC2Object):
NOTE: There is an important distinction between
a value of '' and a value of None.
"""
- if value:
+ if value is not None:
tags = {key : value}
else:
tags = [key]
diff --git a/awx/lib/site-packages/boto/ec2/elb/__init__.py b/awx/lib/site-packages/boto/ec2/elb/__init__.py
index d308c72639..9971e06078 100644
--- a/awx/lib/site-packages/boto/ec2/elb/__init__.py
+++ b/awx/lib/site-packages/boto/ec2/elb/__init__.py
@@ -30,9 +30,9 @@ from boto.ec2.instanceinfo import InstanceInfo
from boto.ec2.elb.loadbalancer import LoadBalancer, LoadBalancerZones
from boto.ec2.elb.instancestate import InstanceState
from boto.ec2.elb.healthcheck import HealthCheck
-from boto.ec2.elb.listelement import ListElement
from boto.regioninfo import RegionInfo, get_regions, load_regions
import boto
+from boto.compat import six
RegionData = load_regions().get('elasticloadbalancing', {})
@@ -68,8 +68,9 @@ class ELBConnection(AWSQueryConnection):
APIVersion = boto.config.get('Boto', 'elb_version', '2012-06-01')
DefaultRegionName = boto.config.get('Boto', 'elb_region_name', 'us-east-1')
- DefaultRegionEndpoint = boto.config.get('Boto', 'elb_region_endpoint',
- 'elasticloadbalancing.us-east-1.amazonaws.com')
+ DefaultRegionEndpoint = boto.config.get(
+ 'Boto', 'elb_region_endpoint',
+ 'elasticloadbalancing.us-east-1.amazonaws.com')
def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
is_secure=True, port=None, proxy=None, proxy_port=None,
@@ -87,31 +88,37 @@ class ELBConnection(AWSQueryConnection):
self.DefaultRegionEndpoint)
self.region = region
super(ELBConnection, self).__init__(aws_access_key_id,
- aws_secret_access_key,
- is_secure, port, proxy, proxy_port,
- proxy_user, proxy_pass,
- self.region.endpoint, debug,
- https_connection_factory, path,
- security_token,
- validate_certs=validate_certs,
- profile_name=profile_name)
+ aws_secret_access_key,
+ is_secure, port, proxy, proxy_port,
+ proxy_user, proxy_pass,
+ self.region.endpoint, debug,
+ https_connection_factory, path,
+ security_token,
+ validate_certs=validate_certs,
+ profile_name=profile_name)
def _required_auth_capability(self):
return ['ec2']
def build_list_params(self, params, items, label):
- if isinstance(items, basestring):
+ if isinstance(items, six.string_types):
items = [items]
for index, item in enumerate(items):
params[label % (index + 1)] = item
- def get_all_load_balancers(self, load_balancer_names=None):
+ def get_all_load_balancers(self, load_balancer_names=None, marker=None):
"""
Retrieve all load balancers associated with your account.
:type load_balancer_names: list
:keyword load_balancer_names: An optional list of load balancer names.
+ :type marker: string
+ :param marker: Use this only when paginating results and only
+ in follow-up request after you've received a response
+ where the results are truncated. Set this to the value of
+ the Marker element in the response you just received.
+
:rtype: :py:class:`boto.resultset.ResultSet`
:return: A ResultSet containing instances of
:class:`boto.ec2.elb.loadbalancer.LoadBalancer`
@@ -120,11 +127,16 @@ class ELBConnection(AWSQueryConnection):
if load_balancer_names:
self.build_list_params(params, load_balancer_names,
'LoadBalancerNames.member.%d')
+
+ if marker:
+ params['Marker'] = marker
+
return self.get_list('DescribeLoadBalancers', params,
[('member', LoadBalancer)])
def create_load_balancer(self, name, zones, listeners=None, subnets=None,
- security_groups=None, scheme='internet-facing', complex_listeners=None):
+ security_groups=None, scheme='internet-facing',
+ complex_listeners=None):
"""
Create a new load balancer for your account. By default the load
balancer will be created in EC2. To create a load balancer inside a
@@ -170,13 +182,14 @@ class ELBConnection(AWSQueryConnection):
:type complex_listeners: List of tuples
:param complex_listeners: Each tuple contains four or five values,
- (LoadBalancerPortNumber, InstancePortNumber, Protocol, InstanceProtocol,
- SSLCertificateId).
+ (LoadBalancerPortNumber, InstancePortNumber, Protocol,
+ InstanceProtocol, SSLCertificateId).
Where:
- LoadBalancerPortNumber and InstancePortNumber are integer
values between 1 and 65535
- - Protocol and InstanceProtocol is a string containing either 'TCP',
+ - Protocol and InstanceProtocol is a string containing
+ either 'TCP',
'SSL', 'HTTP', or 'HTTPS'
- SSLCertificateId is the ARN of an SSL certificate loaded into
AWS IAM
@@ -224,7 +237,7 @@ class ELBConnection(AWSQueryConnection):
if security_groups:
self.build_list_params(params, security_groups,
- 'SecurityGroups.member.%d')
+ 'SecurityGroups.member.%d')
load_balancer = self.get_object('CreateLoadBalancer',
params, LoadBalancer)
@@ -235,7 +248,8 @@ class ELBConnection(AWSQueryConnection):
load_balancer.security_groups = security_groups
return load_balancer
- def create_load_balancer_listeners(self, name, listeners=None, complex_listeners=None):
+ def create_load_balancer_listeners(self, name, listeners=None,
+ complex_listeners=None):
"""
Creates a Listener (or group of listeners) for an existing
Load Balancer
@@ -254,13 +268,14 @@ class ELBConnection(AWSQueryConnection):
:type complex_listeners: List of tuples
:param complex_listeners: Each tuple contains four or five values,
- (LoadBalancerPortNumber, InstancePortNumber, Protocol, InstanceProtocol,
- SSLCertificateId).
+ (LoadBalancerPortNumber, InstancePortNumber, Protocol,
+ InstanceProtocol, SSLCertificateId).
Where:
- LoadBalancerPortNumber and InstancePortNumber are integer
values between 1 and 65535
- - Protocol and InstanceProtocol is a string containing either 'TCP',
+ - Protocol and InstanceProtocol is a string containing
+ either 'TCP',
'SSL', 'HTTP', or 'HTTPS'
- SSLCertificateId is the ARN of an SSL certificate loaded into
AWS IAM
@@ -347,7 +362,7 @@ class ELBConnection(AWSQueryConnection):
self.build_list_params(params, zones_to_add,
'AvailabilityZones.member.%d')
obj = self.get_object('EnableAvailabilityZonesForLoadBalancer',
- params, LoadBalancerZones)
+ params, LoadBalancerZones)
return obj.zones
def disable_availability_zones(self, load_balancer_name, zones_to_remove):
@@ -372,7 +387,7 @@ class ELBConnection(AWSQueryConnection):
self.build_list_params(params, zones_to_remove,
'AvailabilityZones.member.%d')
obj = self.get_object('DisableAvailabilityZonesForLoadBalancer',
- params, LoadBalancerZones)
+ params, LoadBalancerZones)
return obj.zones
def modify_lb_attribute(self, load_balancer_name, attribute, value):
@@ -386,6 +401,7 @@ class ELBConnection(AWSQueryConnection):
* crossZoneLoadBalancing - Boolean (true)
* accessLog - :py:class:`AccessLogAttribute` instance
+ * connectionDraining - :py:class:`ConnectionDrainingAttribute` instance
:type value: string
:param value: The new value for the attribute
@@ -415,6 +431,11 @@ class ELBConnection(AWSQueryConnection):
value.s3_bucket_prefix
params['LoadBalancerAttributes.AccessLog.EmitInterval'] = \
value.emit_interval
+ elif attribute.lower() == 'connectiondraining':
+ params['LoadBalancerAttributes.ConnectionDraining.Enabled'] = \
+ value.enabled and 'true' or 'false'
+ params['LoadBalancerAttributes.ConnectionDraining.Timeout'] = \
+ value.timeout
else:
raise ValueError('InvalidAttribute', attribute)
return self.get_status('ModifyLoadBalancerAttributes', params,
@@ -445,14 +466,21 @@ class ELBConnection(AWSQueryConnection):
:type attribute: string
:param attribute: The attribute you wish to see.
+ * accessLog - :py:class:`AccessLogAttribute` instance
* crossZoneLoadBalancing - Boolean
+ * connectionDraining - :py:class:`ConnectionDrainingAttribute`
+ instance
:rtype: Attribute dependent
:return: The new value for the attribute
"""
attributes = self.get_all_lb_attributes(load_balancer_name)
+ if attribute.lower() == 'accesslog':
+ return attributes.access_log
if attribute.lower() == 'crosszoneloadbalancing':
return attributes.cross_zone_load_balancing.enabled
+ if attribute.lower() == 'connectiondraining':
+ return attributes.connection_draining
return None
def register_instances(self, load_balancer_name, instances):
@@ -601,17 +629,19 @@ class ELBConnection(AWSQueryConnection):
params['CookieExpirationPeriod'] = cookie_expiration_period
return self.get_status('CreateLBCookieStickinessPolicy', params)
- def create_lb_policy(self, lb_name, policy_name, policy_type, policy_attributes):
+ def create_lb_policy(self, lb_name, policy_name, policy_type,
+ policy_attributes):
"""
- Creates a new policy that contais the necessary attributes depending on
- the policy type. Policies are settings that are saved for your load
- balancer and that can be applied to the front-end listener, or
- the back-end application server.
+ Creates a new policy that contains the necessary attributes
+ depending on the policy type. Policies are settings that are
+ saved for your load balancer and that can be applied to the
+ front-end listener, or the back-end application server.
+
"""
params = {'LoadBalancerName': lb_name,
'PolicyName': policy_name,
'PolicyTypeName': policy_type}
- for index, (name, value) in enumerate(policy_attributes.iteritems(), 1):
+ for index, (name, value) in enumerate(six.iteritems(policy_attributes), 1):
params['PolicyAttributes.member.%d.AttributeName' % index] = name
params['PolicyAttributes.member.%d.AttributeValue' % index] = value
else:
@@ -635,10 +665,14 @@ class ELBConnection(AWSQueryConnection):
"""
params = {'LoadBalancerName': lb_name,
'LoadBalancerPort': lb_port}
- self.build_list_params(params, policies, 'PolicyNames.member.%d')
+ if len(policies):
+ self.build_list_params(params, policies, 'PolicyNames.member.%d')
+ else:
+ params['PolicyNames'] = ''
return self.get_status('SetLoadBalancerPoliciesOfListener', params)
- def set_lb_policies_of_backend_server(self, lb_name, instance_port, policies):
+ def set_lb_policies_of_backend_server(self, lb_name, instance_port,
+ policies):
"""
Replaces the current set of policies associated with a port on which
the back-end server is listening with a new set of policies.
@@ -649,7 +683,8 @@ class ELBConnection(AWSQueryConnection):
self.build_list_params(params, policies, 'PolicyNames.member.%d')
else:
params['PolicyNames'] = ''
- return self.get_status('SetLoadBalancerPoliciesForBackendServer', params)
+ return self.get_status('SetLoadBalancerPoliciesForBackendServer',
+ params)
def apply_security_groups_to_lb(self, name, security_groups):
"""
diff --git a/awx/lib/site-packages/boto/ec2/elb/attributes.py b/awx/lib/site-packages/boto/ec2/elb/attributes.py
index edf3eedcc0..05ca8f82e7 100644
--- a/awx/lib/site-packages/boto/ec2/elb/attributes.py
+++ b/awx/lib/site-packages/boto/ec2/elb/attributes.py
@@ -74,6 +74,31 @@ class AccessLogAttribute(object):
elif name == 'EmitInterval':
self.emit_interval = int(value)
+class ConnectionDrainingAttribute(object):
+ """
+ Represents the ConnectionDraining segment of ELB attributes.
+ """
+ def __init__(self, connection=None):
+ self.enabled = None
+ self.timeout = None
+
+ def __repr__(self):
+ return 'ConnectionDraining(%s, %s)' % (
+ self.enabled,
+ self.timeout
+ )
+
+ def startElement(self, name, attrs, connection):
+ pass
+
+ def endElement(self, name, value, connection):
+ if name == 'Enabled':
+ if value.lower() == 'true':
+ self.enabled = True
+ else:
+ self.enabled = False
+ elif name == 'Timeout':
+ self.timeout = int(value)
class LbAttributes(object):
"""
@@ -84,17 +109,21 @@ class LbAttributes(object):
self.cross_zone_load_balancing = CrossZoneLoadBalancingAttribute(
self.connection)
self.access_log = AccessLogAttribute(self.connection)
+ self.connection_draining = ConnectionDrainingAttribute(self.connection)
def __repr__(self):
- return 'LbAttributes(%s, %s)' % (
+ return 'LbAttributes(%s, %s, %s)' % (
repr(self.cross_zone_load_balancing),
- repr(self.access_log))
+ repr(self.access_log),
+ repr(self.connection_draining))
def startElement(self, name, attrs, connection):
if name == 'CrossZoneLoadBalancing':
return self.cross_zone_load_balancing
if name == 'AccessLog':
return self.access_log
+ if name == 'ConnectionDraining':
+ return self.connection_draining
def endElement(self, name, value, connection):
pass
diff --git a/awx/lib/site-packages/boto/ec2/elb/loadbalancer.py b/awx/lib/site-packages/boto/ec2/elb/loadbalancer.py
index f76feb15cc..3a065cf3a9 100644
--- a/awx/lib/site-packages/boto/ec2/elb/loadbalancer.py
+++ b/awx/lib/site-packages/boto/ec2/elb/loadbalancer.py
@@ -27,6 +27,7 @@ from boto.ec2.elb.policies import Policies, OtherPolicy
from boto.ec2.elb.securitygroup import SecurityGroup
from boto.ec2.instanceinfo import InstanceInfo
from boto.resultset import ResultSet
+from boto.compat import six
class Backend(object):
@@ -67,6 +68,7 @@ class LoadBalancerZones(object):
def endElement(self, name, value, connection):
pass
+
class LoadBalancer(object):
"""
Represents an EC2 Load Balancer.
@@ -82,6 +84,7 @@ class LoadBalancer(object):
check policy for this load balancer.
:ivar boto.ec2.elb.policies.Policies policies: Cookie stickiness and
other policies.
+ :ivar str name: The name of the Load Balancer.
:ivar str dns_name: The external DNS name for the balancer.
:ivar str created_time: A date+time string showing when the
load balancer was created.
@@ -186,7 +189,7 @@ class LoadBalancer(object):
:param zones: The name of the zone(s) to add.
"""
- if isinstance(zones, basestring):
+ if isinstance(zones, six.string_types):
zones = [zones]
new_zones = self.connection.enable_availability_zones(self.name, zones)
self.availability_zones = new_zones
@@ -199,9 +202,10 @@ class LoadBalancer(object):
:param zones: The name of the zone(s) to add.
"""
- if isinstance(zones, basestring):
+ if isinstance(zones, six.string_types):
zones = [zones]
- new_zones = self.connection.disable_availability_zones(self.name, zones)
+ new_zones = self.connection.disable_availability_zones(
+ self.name, zones)
self.availability_zones = new_zones
def get_attributes(self, force=False):
@@ -266,7 +270,7 @@ class LoadBalancer(object):
to add to this load balancer.
"""
- if isinstance(instances, basestring):
+ if isinstance(instances, six.string_types):
instances = [instances]
new_instances = self.connection.register_instances(self.name,
instances)
@@ -281,7 +285,7 @@ class LoadBalancer(object):
to remove from this load balancer.
"""
- if isinstance(instances, basestring):
+ if isinstance(instances, six.string_types):
instances = [instances]
new_instances = self.connection.deregister_instances(self.name,
instances)
@@ -348,14 +352,13 @@ class LoadBalancer(object):
policies)
def set_policies_of_backend_server(self, instance_port, policies):
- return self.connection.set_lb_policies_of_backend_server(self.name,
- instance_port,
- policies)
-
+ return self.connection.set_lb_policies_of_backend_server(
+ self.name, instance_port, policies)
def create_cookie_stickiness_policy(self, cookie_expiration_period,
policy_name):
- return self.connection.create_lb_cookie_stickiness_policy(cookie_expiration_period, self.name, policy_name)
+ return self.connection.create_lb_cookie_stickiness_policy(
+ cookie_expiration_period, self.name, policy_name)
def create_app_cookie_stickiness_policy(self, name, policy_name):
return self.connection.create_app_cookie_stickiness_policy(name,
@@ -363,12 +366,12 @@ class LoadBalancer(object):
policy_name)
def set_listener_SSL_certificate(self, lb_port, ssl_certificate_id):
- return self.connection.set_lb_listener_SSL_certificate(self.name,
- lb_port,
- ssl_certificate_id)
+ return self.connection.set_lb_listener_SSL_certificate(
+ self.name, lb_port, ssl_certificate_id)
def create_lb_policy(self, policy_name, policy_type, policy_attribute):
- return self.connection.create_lb_policy(self.name, policy_name, policy_type, policy_attribute)
+ return self.connection.create_lb_policy(
+ self.name, policy_name, policy_type, policy_attribute)
def attach_subnets(self, subnets):
"""
@@ -380,7 +383,7 @@ class LoadBalancer(object):
:param subnets: The name of the subnet(s) to add.
"""
- if isinstance(subnets, basestring):
+ if isinstance(subnets, six.string_types):
subnets = [subnets]
new_subnets = self.connection.attach_lb_to_subnets(self.name, subnets)
self.subnets = new_subnets
@@ -393,9 +396,10 @@ class LoadBalancer(object):
:param subnets: The name of the subnet(s) to detach.
"""
- if isinstance(subnets, basestring):
+ if isinstance(subnets, six.string_types):
subnets = [subnets]
- new_subnets = self.connection.detach_lb_from_subnets(self.name, subnets)
+ new_subnets = self.connection.detach_lb_from_subnets(
+ self.name, subnets)
self.subnets = new_subnets
def apply_security_groups(self, security_groups):
@@ -408,8 +412,8 @@ class LoadBalancer(object):
:param security_groups: The name of the security group(s) to add.
"""
- if isinstance(security_groups, basestring):
+ if isinstance(security_groups, six.string_types):
security_groups = [security_groups]
new_sgs = self.connection.apply_security_groups_to_lb(
- self.name, security_groups)
+ self.name, security_groups)
self.security_groups = new_sgs
diff --git a/awx/lib/site-packages/boto/ec2/image.py b/awx/lib/site-packages/boto/ec2/image.py
index 807811dcc8..f94f77e816 100644
--- a/awx/lib/site-packages/boto/ec2/image.py
+++ b/awx/lib/site-packages/boto/ec2/image.py
@@ -233,6 +233,9 @@ class Image(TaggedEC2Object):
* i2.2xlarge
* i2.4xlarge
* i2.8xlarge
+ * t2.micro
+ * t2.small
+ * t2.medium
:type placement: string
:param placement: The Availability Zone to launch the instance into.
diff --git a/awx/lib/site-packages/boto/ec2/keypair.py b/awx/lib/site-packages/boto/ec2/keypair.py
index 9323c24a5a..b399a97ecb 100644
--- a/awx/lib/site-packages/boto/ec2/keypair.py
+++ b/awx/lib/site-packages/boto/ec2/keypair.py
@@ -83,7 +83,7 @@ class KeyPair(EC2Object):
fp = open(file_path, 'wb')
fp.write(self.material)
fp.close()
- os.chmod(file_path, 0600)
+ os.chmod(file_path, 0o600)
return True
else:
raise BotoClientError('KeyPair contains no material')
diff --git a/awx/lib/site-packages/boto/ec2/networkinterface.py b/awx/lib/site-packages/boto/ec2/networkinterface.py
index b786edb688..6596439e32 100644
--- a/awx/lib/site-packages/boto/ec2/networkinterface.py
+++ b/awx/lib/site-packages/boto/ec2/networkinterface.py
@@ -167,6 +167,70 @@ class NetworkInterface(TaggedEC2Object):
else:
setattr(self, name, value)
+ def _update(self, updated):
+ self.__dict__.update(updated.__dict__)
+
+ def update(self, validate=False, dry_run=False):
+ """
+ Update the data associated with this ENI by querying EC2.
+
+ :type validate: bool
+ :param validate: By default, if EC2 returns no data about the
+ ENI the update method returns quietly. If
+ the validate param is True, however, it will
+ raise a ValueError exception if no data is
+ returned from EC2.
+ """
+ rs = self.connection.get_all_network_interfaces(
+ [self.id],
+ dry_run=dry_run
+ )
+ if len(rs) > 0:
+ self._update(rs[0])
+ elif validate:
+ raise ValueError('%s is not a valid ENI ID' % self.id)
+ return self.status
+
+ def attach(self, instance_id, device_index, dry_run=False):
+ """
+ Attach this ENI to an EC2 instance.
+
+ :type instance_id: str
+ :param instance_id: The ID of the EC2 instance to which it will
+ be attached.
+
+ :type device_index: int
+ :param device_index: The interface nunber, N, on the instance (eg. ethN)
+
+ :rtype: bool
+ :return: True if successful
+ """
+ return self.connection.attach_network_interface(
+ self.id,
+ instance_id,
+ device_index,
+ dry_run=dry_run
+ )
+
+ def detach(self, force=False, dry_run=False):
+ """
+ Detach this ENI from an EC2 instance.
+
+ :type force: bool
+ :param force: Forces detachment if the previous detachment
+ attempt did not occur cleanly.
+
+ :rtype: bool
+ :return: True if successful
+ """
+ attachment_id = getattr(self.attachment, 'id', None)
+
+ return self.connection.detach_network_interface(
+ attachment_id,
+ force,
+ dry_run=dry_run
+ )
+
def delete(self, dry_run=False):
return self.connection.delete_network_interface(
self.id,
diff --git a/awx/lib/site-packages/boto/ec2/reservedinstance.py b/awx/lib/site-packages/boto/ec2/reservedinstance.py
index b76aaa87fe..72d2bf3023 100644
--- a/awx/lib/site-packages/boto/ec2/reservedinstance.py
+++ b/awx/lib/site-packages/boto/ec2/reservedinstance.py
@@ -18,7 +18,6 @@
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-
from boto.resultset import ResultSet
from boto.ec2.ec2object import EC2Object
from boto.utils import parse_ts
@@ -82,13 +81,13 @@ class ReservedInstancesOffering(EC2Object):
self.marketplace = True if value == 'true' else False
def describe(self):
- print 'ID=%s' % self.id
- print '\tInstance Type=%s' % self.instance_type
- print '\tZone=%s' % self.availability_zone
- print '\tDuration=%s' % self.duration
- print '\tFixed Price=%s' % self.fixed_price
- print '\tUsage Price=%s' % self.usage_price
- print '\tDescription=%s' % self.description
+ print('ID=%s' % self.id)
+ print('\tInstance Type=%s' % self.instance_type)
+ print('\tZone=%s' % self.availability_zone)
+ print('\tDuration=%s' % self.duration)
+ print('\tFixed Price=%s' % self.fixed_price)
+ print('\tUsage Price=%s' % self.usage_price)
+ print('\tDescription=%s' % self.description)
def purchase(self, instance_count=1, dry_run=False):
return self.connection.purchase_reserved_instance_offering(
diff --git a/awx/lib/site-packages/boto/ec2/snapshot.py b/awx/lib/site-packages/boto/ec2/snapshot.py
index 6121d0c825..22f69ab216 100644
--- a/awx/lib/site-packages/boto/ec2/snapshot.py
+++ b/awx/lib/site-packages/boto/ec2/snapshot.py
@@ -41,6 +41,7 @@ class Snapshot(TaggedEC2Object):
self.owner_alias = None
self.volume_size = None
self.description = None
+ self.encrypted = None
def __repr__(self):
return 'Snapshot:%s' % self.id
@@ -65,6 +66,8 @@ class Snapshot(TaggedEC2Object):
self.volume_size = value
elif name == 'description':
self.description = value
+ elif name == 'encrypted':
+ self.encrypted = (value.lower() == 'true')
else:
setattr(self, name, value)
@@ -152,6 +155,7 @@ class Snapshot(TaggedEC2Object):
self.id,
volume_type,
iops,
+ self.encrypted,
dry_run=dry_run
)
diff --git a/awx/lib/site-packages/boto/ec2/volume.py b/awx/lib/site-packages/boto/ec2/volume.py
index 95121fa813..c40062b37c 100644
--- a/awx/lib/site-packages/boto/ec2/volume.py
+++ b/awx/lib/site-packages/boto/ec2/volume.py
@@ -44,6 +44,7 @@ class Volume(TaggedEC2Object):
:ivar type: The type of volume (standard or consistent-iops)
:ivar iops: If this volume is of type consistent-iops, this is
the number of IOPS provisioned (10-300).
+ :ivar encrypted: True if this volume is encrypted.
"""
def __init__(self, connection=None):
@@ -57,6 +58,7 @@ class Volume(TaggedEC2Object):
self.zone = None
self.type = None
self.iops = None
+ self.encrypted = None
def __repr__(self):
return 'Volume:%s' % self.id
@@ -92,6 +94,8 @@ class Volume(TaggedEC2Object):
self.type = value
elif name == 'iops':
self.iops = int(value)
+ elif name == 'encrypted':
+ self.encrypted = (value.lower() == 'true')
else:
setattr(self, name, value)
diff --git a/awx/lib/site-packages/boto/ecs/__init__.py b/awx/lib/site-packages/boto/ecs/__init__.py
index d643afc76a..46db50650e 100644
--- a/awx/lib/site-packages/boto/ecs/__init__.py
+++ b/awx/lib/site-packages/boto/ecs/__init__.py
@@ -21,6 +21,7 @@
import boto
from boto.connection import AWSQueryConnection, AWSAuthConnection
+from boto.exception import BotoServerError
import time
import urllib
import xml.sax
@@ -61,20 +62,22 @@ class ECSConnection(AWSQueryConnection):
if page:
params['ItemPage'] = page
response = self.make_request(None, params, "/onca/xml")
- body = response.read()
+ body = response.read().decode('utf-8')
boto.log.debug(body)
if response.status != 200:
boto.log.error('%s %s' % (response.status, response.reason))
boto.log.error('%s' % body)
- raise self.ResponseError(response.status, response.reason, body)
+ raise BotoServerError(response.status, response.reason, body)
if itemSet is None:
rs = ItemSet(self, action, params, page)
else:
rs = itemSet
h = handler.XmlHandler(rs, self)
- xml.sax.parseString(body, h)
+ xml.sax.parseString(body.encode('utf-8'), h)
+ if not rs.is_valid:
+ raise BotoServerError(response.status, '{Code}: {Message}'.format(**rs.errors[0]))
return rs
#
@@ -91,3 +94,12 @@ class ECSConnection(AWSQueryConnection):
"""
params['SearchIndex'] = search_index
return self.get_response('ItemSearch', params)
+
+ def item_lookup(self, **params):
+ """
+ Returns items that satisfy the lookup query.
+
+ For a full list of parameters, see:
+ http://s3.amazonaws.com/awsdocs/Associates/2011-08-01/prod-adv-api-dg-2011-08-01.pdf
+ """
+ return self.get_response('ItemLookup', params)
\ No newline at end of file
diff --git a/awx/lib/site-packages/boto/ecs/item.py b/awx/lib/site-packages/boto/ecs/item.py
index 4349e41e1d..79177a31d4 100644
--- a/awx/lib/site-packages/boto/ecs/item.py
+++ b/awx/lib/site-packages/boto/ecs/item.py
@@ -22,7 +22,7 @@
import xml.sax
import cgi
-from StringIO import StringIO
+from boto.compat import six, StringIO
class ResponseGroup(xml.sax.ContentHandler):
"""A Generic "Response Group", which can
@@ -90,14 +90,14 @@ class Item(ResponseGroup):
def __init__(self, connection=None):
"""Initialize this Item"""
- super(Item, self).__init__(connection, "Item")
+ ResponseGroup.__init__(self, connection, "Item")
class ItemSet(ResponseGroup):
"""A special ResponseGroup that has built-in paging, and
only creates new Items on the "Item" tag"""
def __init__(self, connection, action, params, page=0):
- super(ItemSet, self).__init__(connection, "Items")
+ ResponseGroup.__init__(self, connection, "Items")
self.objs = []
self.iter = None
self.page = page
@@ -106,6 +106,8 @@ class ItemSet(ResponseGroup):
self.curItem = None
self.total_results = 0
self.total_pages = 0
+ self.is_valid = False
+ self.errors = []
def startElement(self, name, attrs, connection):
if name == "Item":
@@ -119,7 +121,14 @@ class ItemSet(ResponseGroup):
self.total_results = value
elif name == 'TotalPages':
self.total_pages = value
- elif name == "Item":
+ elif name == 'IsValid':
+ if value == 'True':
+ self.is_valid = True
+ elif name == 'Code':
+ self.errors.append({'Code': value, 'Message': None})
+ elif name == 'Message':
+ self.errors[-1]['Message'] = value
+ elif name == 'Item':
self.objs.append(self.curItem)
self._xml.write(self.curItem.to_xml())
self.curItem = None
@@ -127,22 +136,24 @@ class ItemSet(ResponseGroup):
self.curItem.endElement(name, value, connection)
return None
- def next(self):
+ def __next__(self):
"""Special paging functionality"""
if self.iter is None:
self.iter = iter(self.objs)
try:
- return self.iter.next()
+ return next(self.iter)
except StopIteration:
self.iter = None
self.objs = []
if int(self.page) < int(self.total_pages):
self.page += 1
self._connection.get_response(self.action, self.params, self.page, self)
- return self.next()
+ return next(self)
else:
raise
+ next = __next__
+
def __iter__(self):
return self
@@ -150,4 +161,4 @@ class ItemSet(ResponseGroup):
"""Override to first fetch everything"""
for item in self:
pass
- return super(ItemSet, self).to_xml()
+ return ResponseGroup.to_xml(self)
diff --git a/awx/lib/site-packages/boto/elasticache/layer1.py b/awx/lib/site-packages/boto/elasticache/layer1.py
index be7080d0e4..59c43a3dc1 100644
--- a/awx/lib/site-packages/boto/elasticache/layer1.py
+++ b/awx/lib/site-packages/boto/elasticache/layer1.py
@@ -1657,7 +1657,7 @@ class ElastiCacheConnection(AWSQueryConnection):
params['ContentType'] = 'JSON'
response = self.make_request(action=action, verb='POST',
path='/', params=params)
- body = response.read()
+ body = response.read().decode('utf-8')
boto.log.debug(body)
if response.status == 200:
return json.loads(body)
diff --git a/awx/lib/site-packages/boto/elastictranscoder/layer1.py b/awx/lib/site-packages/boto/elastictranscoder/layer1.py
index e47c199c7e..3189f35d48 100644
--- a/awx/lib/site-packages/boto/elastictranscoder/layer1.py
+++ b/awx/lib/site-packages/boto/elastictranscoder/layer1.py
@@ -923,7 +923,7 @@ class ElasticTranscoderConnection(AWSAuthConnection):
headers = {}
response = super(ElasticTranscoderConnection, self).make_request(
verb, resource, headers=headers, data=data)
- body = json.load(response)
+ body = json.loads(response.read().decode('utf-8'))
if response.status == expected_status:
return body
else:
diff --git a/awx/lib/site-packages/boto/emr/__init__.py b/awx/lib/site-packages/boto/emr/__init__.py
index b04d08fecd..5f3181d289 100644
--- a/awx/lib/site-packages/boto/emr/__init__.py
+++ b/awx/lib/site-packages/boto/emr/__init__.py
@@ -26,9 +26,9 @@
This module provies an interface to the Elastic MapReduce (EMR)
service from AWS.
"""
-from connection import EmrConnection
-from step import Step, StreamingStep, JarStep
-from bootstrap_action import BootstrapAction
+from boto.emr.connection import EmrConnection
+from boto.emr.step import Step, StreamingStep, JarStep
+from boto.emr.bootstrap_action import BootstrapAction
from boto.regioninfo import RegionInfo, get_regions
diff --git a/awx/lib/site-packages/boto/emr/bootstrap_action.py b/awx/lib/site-packages/boto/emr/bootstrap_action.py
index 7db0b3da31..5a01fd21cc 100644
--- a/awx/lib/site-packages/boto/emr/bootstrap_action.py
+++ b/awx/lib/site-packages/boto/emr/bootstrap_action.py
@@ -20,12 +20,14 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
+from boto.compat import six
+
class BootstrapAction(object):
def __init__(self, name, path, bootstrap_action_args):
self.name = name
self.path = path
- if isinstance(bootstrap_action_args, basestring):
+ if isinstance(bootstrap_action_args, six.string_types):
bootstrap_action_args = [bootstrap_action_args]
self.bootstrap_action_args = bootstrap_action_args
diff --git a/awx/lib/site-packages/boto/emr/connection.py b/awx/lib/site-packages/boto/emr/connection.py
index 6c5222ad1a..d15852ea25 100644
--- a/awx/lib/site-packages/boto/emr/connection.py
+++ b/awx/lib/site-packages/boto/emr/connection.py
@@ -37,6 +37,7 @@ from boto.emr.emrobject import AddInstanceGroupsResponse, BootstrapActionList, \
from boto.emr.step import JarStep
from boto.connection import AWSQueryConnection
from boto.exception import EmrResponseError
+from boto.compat import six
class EmrConnection(AWSQueryConnection):
@@ -281,7 +282,7 @@ class EmrConnection(AWSQueryConnection):
value for that tag should be the empty string
(e.g. '') or None.
"""
- assert isinstance(resource_id, basestring)
+ assert isinstance(resource_id, six.string_types)
params = {
'ResourceId': resource_id,
}
@@ -333,7 +334,7 @@ class EmrConnection(AWSQueryConnection):
:type steps: list(boto.emr.Step)
:param steps: A list of steps to add to the job
"""
- if not isinstance(steps, types.ListType):
+ if not isinstance(steps, list):
steps = [steps]
params = {}
params['JobFlowId'] = jobflow_id
@@ -356,7 +357,7 @@ class EmrConnection(AWSQueryConnection):
:type instance_groups: list(boto.emr.InstanceGroup)
:param instance_groups: A list of instance groups to add to the job
"""
- if not isinstance(instance_groups, types.ListType):
+ if not isinstance(instance_groups, list):
instance_groups = [instance_groups]
params = {}
params['JobFlowId'] = jobflow_id
@@ -377,9 +378,9 @@ class EmrConnection(AWSQueryConnection):
:type new_sizes: list(int)
:param new_sizes: A list of the new sizes for each instance group
"""
- if not isinstance(instance_group_ids, types.ListType):
+ if not isinstance(instance_group_ids, list):
instance_group_ids = [instance_group_ids]
- if not isinstance(new_sizes, types.ListType):
+ if not isinstance(new_sizes, list):
new_sizes = [new_sizes]
instance_groups = zip(instance_group_ids, new_sizes)
@@ -409,7 +410,8 @@ class EmrConnection(AWSQueryConnection):
ami_version=None,
api_params=None,
visible_to_all_users=None,
- job_flow_role=None):
+ job_flow_role=None,
+ service_role=None):
"""
Runs a job flow
:type name: str
@@ -491,6 +493,10 @@ class EmrConnection(AWSQueryConnection):
``EMRJobflowDefault``. In order to use the default role,
you must have already created it using the CLI.
+ :type service_role: str
+ :param service_role: The IAM role that will be assumed by the Amazon
+ EMR service to access AWS resources on your behalf.
+
:rtype: str
:return: The jobflow id
"""
@@ -524,7 +530,7 @@ class EmrConnection(AWSQueryConnection):
# Instance group args (for spot instances or a heterogenous cluster)
list_args = self._build_instance_group_list_args(instance_groups)
instance_params = dict(
- ('Instances.%s' % k, v) for k, v in list_args.iteritems()
+ ('Instances.%s' % k, v) for k, v in six.iteritems(list_args)
)
params.update(instance_params)
@@ -553,7 +559,7 @@ class EmrConnection(AWSQueryConnection):
params['AdditionalInfo'] = additional_info
if api_params:
- for key, value in api_params.iteritems():
+ for key, value in six.iteritems(api_params):
if value is None:
params.pop(key, None)
else:
@@ -568,6 +574,9 @@ class EmrConnection(AWSQueryConnection):
if job_flow_role is not None:
params['JobFlowRole'] = job_flow_role
+ if service_role is not None:
+ params['ServiceRole'] = service_role
+
response = self.get_object(
'RunJobFlow', params, RunJobFlowResponse, verb='POST')
return response.jobflowid
@@ -641,27 +650,27 @@ class EmrConnection(AWSQueryConnection):
return step_params
def _build_bootstrap_action_list(self, bootstrap_actions):
- if not isinstance(bootstrap_actions, types.ListType):
+ if not isinstance(bootstrap_actions, list):
bootstrap_actions = [bootstrap_actions]
params = {}
for i, bootstrap_action in enumerate(bootstrap_actions):
- for key, value in bootstrap_action.iteritems():
+ for key, value in six.iteritems(bootstrap_action):
params['BootstrapActions.member.%s.%s' % (i + 1, key)] = value
return params
def _build_step_list(self, steps):
- if not isinstance(steps, types.ListType):
+ if not isinstance(steps, list):
steps = [steps]
params = {}
for i, step in enumerate(steps):
- for key, value in step.iteritems():
+ for key, value in six.iteritems(step):
params['Steps.member.%s.%s' % (i+1, key)] = value
return params
def _build_string_list(self, field, items):
- if not isinstance(items, types.ListType):
+ if not isinstance(items, list):
items = [items]
params = {}
@@ -673,7 +682,7 @@ class EmrConnection(AWSQueryConnection):
assert isinstance(tags, dict)
params = {}
- for i, key_value in enumerate(sorted(tags.iteritems()), start=1):
+ for i, key_value in enumerate(sorted(six.iteritems(tags)), start=1):
key, value = key_value
current_prefix = 'Tags.member.%s' % i
params['%s.Key' % current_prefix] = key
@@ -734,12 +743,12 @@ class EmrConnection(AWSQueryConnection):
a comparable dict for use in making a RunJobFlow or AddInstanceGroups
request.
"""
- if not isinstance(instance_groups, types.ListType):
+ if not isinstance(instance_groups, list):
instance_groups = [instance_groups]
params = {}
for i, instance_group in enumerate(instance_groups):
ig_dict = self._build_instance_group_args(instance_group)
- for key, value in ig_dict.iteritems():
+ for key, value in six.iteritems(ig_dict):
params['InstanceGroups.member.%d.%s' % (i+1, key)] = value
return params
diff --git a/awx/lib/site-packages/boto/emr/emrobject.py b/awx/lib/site-packages/boto/emr/emrobject.py
index 0906bfabbe..f605834c72 100644
--- a/awx/lib/site-packages/boto/emr/emrobject.py
+++ b/awx/lib/site-packages/boto/emr/emrobject.py
@@ -301,7 +301,7 @@ class ClusterSummaryList(EmrObject):
class StepConfig(EmrObject):
Fields = set([
- 'Jar'
+ 'Jar',
'MainClass'
])
@@ -434,11 +434,15 @@ class StepSummary(EmrObject):
def __init__(self, connection=None):
self.connection = connection
self.status = None
+ self.config = None
def startElement(self, name, attrs, connection):
if name == 'Status':
self.status = ClusterStatus()
return self.status
+ elif name == 'Config':
+ self.config = StepConfig()
+ return self.config
else:
return None
diff --git a/awx/lib/site-packages/boto/emr/step.py b/awx/lib/site-packages/boto/emr/step.py
index 4cb7889819..de6835fb4e 100644
--- a/awx/lib/site-packages/boto/emr/step.py
+++ b/awx/lib/site-packages/boto/emr/step.py
@@ -20,6 +20,8 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
+from boto.compat import six
+
class Step(object):
"""
@@ -73,7 +75,7 @@ class JarStep(Step):
self._main_class = main_class
self.action_on_failure = action_on_failure
- if isinstance(step_args, basestring):
+ if isinstance(step_args, six.string_types):
step_args = [step_args]
self.step_args = step_args
@@ -143,7 +145,7 @@ class StreamingStep(Step):
self.output = output
self._jar = jar
- if isinstance(step_args, basestring):
+ if isinstance(step_args, six.string_types):
step_args = [step_args]
self.step_args = step_args
diff --git a/awx/lib/site-packages/boto/exception.py b/awx/lib/site-packages/boto/exception.py
index 99205c9f28..9baa0999d9 100644
--- a/awx/lib/site-packages/boto/exception.py
+++ b/awx/lib/site-packages/boto/exception.py
@@ -26,10 +26,12 @@ Exception classes - Subclassing allows you to check for specific errors
"""
import base64
import xml.sax
-from boto import handler
-from boto.compat import json
-from boto.resultset import ResultSet
+import boto
+
+from boto import handler
+from boto.compat import json, six, StandardError
+from boto.resultset import ResultSet
class BotoClientError(StandardError):
"""
@@ -80,34 +82,56 @@ class BotoServerError(StandardError):
self.request_id = None
self.error_code = None
self._error_message = None
+ self.message = ''
self.box_usage = None
+ if isinstance(self.body, bytes):
+ try:
+ self.body = self.body.decode('utf-8')
+ except UnicodeDecodeError:
+ boto.log.debug('Unable to decode body from bytes!')
+
# Attempt to parse the error response. If body isn't present,
# then just ignore the error response.
if self.body:
- try:
- h = handler.XmlHandlerWrapper(self, self)
- h.parseString(self.body)
- except (TypeError, xml.sax.SAXParseException), pe:
- # What if it's JSON? Let's try that.
+ # Check if it looks like a ``dict``.
+ if hasattr(self.body, 'items'):
+ # It's not a string, so trying to parse it will fail.
+ # But since it's data, we can work with that.
+ self.request_id = self.body.get('RequestId', None)
+
+ if 'Error' in self.body:
+ # XML-style
+ error = self.body.get('Error', {})
+ self.error_code = error.get('Code', None)
+ self.message = error.get('Message', None)
+ else:
+ # JSON-style.
+ self.message = self.body.get('message', None)
+ else:
try:
- parsed = json.loads(self.body)
+ h = handler.XmlHandlerWrapper(self, self)
+ h.parseString(self.body)
+ except (TypeError, xml.sax.SAXParseException) as pe:
+ # What if it's JSON? Let's try that.
+ try:
+ parsed = json.loads(self.body)
- if 'RequestId' in parsed:
- self.request_id = parsed['RequestId']
- if 'Error' in parsed:
- if 'Code' in parsed['Error']:
- self.error_code = parsed['Error']['Code']
- if 'Message' in parsed['Error']:
- self.message = parsed['Error']['Message']
+ if 'RequestId' in parsed:
+ self.request_id = parsed['RequestId']
+ if 'Error' in parsed:
+ if 'Code' in parsed['Error']:
+ self.error_code = parsed['Error']['Code']
+ if 'Message' in parsed['Error']:
+ self.message = parsed['Error']['Message']
- except ValueError:
- # Remove unparsable message body so we don't include garbage
- # in exception. But first, save self.body in self.error_message
- # because occasionally we get error messages from Eucalyptus
- # that are just text strings that we want to preserve.
- self.message = self.body
- self.body = None
+ except (TypeError, ValueError):
+ # Remove unparsable message body so we don't include garbage
+ # in exception. But first, save self.body in self.error_message
+ # because occasionally we get error messages from Eucalyptus
+ # that are just text strings that we want to preserve.
+ self.message = self.body
+ self.body = None
def __getattr__(self, name):
if name == 'error_message':
diff --git a/awx/lib/site-packages/boto/file/__init__.py b/awx/lib/site-packages/boto/file/__init__.py
index 0210b47c6e..837a164f47 100644
--- a/awx/lib/site-packages/boto/file/__init__.py
+++ b/awx/lib/site-packages/boto/file/__init__.py
@@ -21,8 +21,8 @@
import boto
-from connection import FileConnection as Connection
-from key import Key
-from bucket import Bucket
+from boto.file.connection import FileConnection as Connection
+from boto.file.key import Key
+from boto.file.bucket import Bucket
__all__ = ['Connection', 'Key', 'Bucket']
diff --git a/awx/lib/site-packages/boto/file/bucket.py b/awx/lib/site-packages/boto/file/bucket.py
index 8aec677317..d49755eaae 100644
--- a/awx/lib/site-packages/boto/file/bucket.py
+++ b/awx/lib/site-packages/boto/file/bucket.py
@@ -23,7 +23,7 @@
# File representation of bucket, for use with "file://" URIs.
import os
-from key import Key
+from boto.file.key import Key
from boto.file.simpleresultset import SimpleResultSet
from boto.s3.bucketlistresultset import BucketListResultSet
diff --git a/awx/lib/site-packages/boto/file/connection.py b/awx/lib/site-packages/boto/file/connection.py
index f453f71e04..2507e2db0b 100644
--- a/awx/lib/site-packages/boto/file/connection.py
+++ b/awx/lib/site-packages/boto/file/connection.py
@@ -21,7 +21,7 @@
# File representation of connection, for use with "file://" URIs.
-from bucket import Bucket
+from boto.file.bucket import Bucket
class FileConnection(object):
diff --git a/awx/lib/site-packages/boto/file/key.py b/awx/lib/site-packages/boto/file/key.py
index 2f20cae5b9..3ec345d464 100644
--- a/awx/lib/site-packages/boto/file/key.py
+++ b/awx/lib/site-packages/boto/file/key.py
@@ -22,9 +22,11 @@
# File representation of key, for use with "file://" URIs.
-import os, shutil, StringIO
+import os, shutil
import sys
+from boto.compat import StringIO
+
class Key(object):
KEY_STREAM_READABLE = 0x01
@@ -182,7 +184,7 @@ class Key(object):
:returns: The contents of the file as a string
"""
- fp = StringIO.StringIO()
+ fp = StringIO()
self.get_contents_to_file(fp)
return fp.getvalue()
diff --git a/awx/lib/site-packages/boto/fps/connection.py b/awx/lib/site-packages/boto/fps/connection.py
index dd9b235184..6dc90a248e 100644
--- a/awx/lib/site-packages/boto/fps/connection.py
+++ b/awx/lib/site-packages/boto/fps/connection.py
@@ -86,7 +86,7 @@ def needs_caller_reference(func):
def api_action(*api):
def decorator(func):
- action = ''.join(api or map(str.capitalize, func.func_name.split('_')))
+ action = ''.join(api or map(str.capitalize, func.__name__.split('_')))
response = ResponseFactory(action)
if hasattr(boto.fps.response, action + 'Response'):
response = getattr(boto.fps.response, action + 'Response')
diff --git a/awx/lib/site-packages/boto/fps/response.py b/awx/lib/site-packages/boto/fps/response.py
index 94c8d15199..c0a9e2837f 100644
--- a/awx/lib/site-packages/boto/fps/response.py
+++ b/awx/lib/site-packages/boto/fps/response.py
@@ -22,6 +22,7 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from decimal import Decimal
+from boto.compat import filter, map
def ResponseFactory(action):
diff --git a/awx/lib/site-packages/boto/glacier/concurrent.py b/awx/lib/site-packages/boto/glacier/concurrent.py
index dc54081998..a4f3a224a0 100644
--- a/awx/lib/site-packages/boto/glacier/concurrent.py
+++ b/awx/lib/site-packages/boto/glacier/concurrent.py
@@ -19,21 +19,20 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
-from __future__ import with_statement
-
import os
import math
import threading
import hashlib
import time
import logging
-from Queue import Queue, Empty
+from boto.compat import Queue
import binascii
-from .utils import DEFAULT_PART_SIZE, minimum_part_size, chunk_hashes, \
- tree_hash, bytes_to_hex
-from .exceptions import UploadArchiveError, DownloadArchiveError, \
- TreeHashDoesNotMatchError
+from boto.glacier.utils import DEFAULT_PART_SIZE, minimum_part_size, \
+ chunk_hashes, tree_hash, bytes_to_hex
+from boto.glacier.exceptions import UploadArchiveError, \
+ DownloadArchiveError, \
+ TreeHashDoesNotMatchError
_END_SENTINEL = object()
@@ -68,9 +67,9 @@ class ConcurrentTransferer(object):
def _add_work_items_to_queue(self, total_parts, worker_queue, part_size):
log.debug("Adding work items to queue.")
- for i in xrange(total_parts):
+ for i in range(total_parts):
worker_queue.put((i, part_size))
- for i in xrange(self._num_threads):
+ for i in range(self._num_threads):
worker_queue.put(_END_SENTINEL)
@@ -146,7 +145,7 @@ class ConcurrentUploader(ConcurrentTransferer):
try:
self._wait_for_upload_threads(hash_chunks, result_queue,
total_parts)
- except UploadArchiveError, e:
+ except UploadArchiveError as e:
log.debug("An error occurred while uploading an archive, "
"aborting multipart upload.")
self._api.abort_multipart_upload(self._vault_name, upload_id)
@@ -159,7 +158,7 @@ class ConcurrentUploader(ConcurrentTransferer):
return response['ArchiveId']
def _wait_for_upload_threads(self, hash_chunks, result_queue, total_parts):
- for _ in xrange(total_parts):
+ for _ in range(total_parts):
result = result_queue.get()
if isinstance(result, Exception):
log.debug("An error was found in the result queue, terminating "
@@ -177,7 +176,7 @@ class ConcurrentUploader(ConcurrentTransferer):
def _start_upload_threads(self, result_queue, upload_id, worker_queue,
filename):
log.debug("Starting threads.")
- for _ in xrange(self._num_threads):
+ for _ in range(self._num_threads):
thread = UploadWorkerThread(self._api, self._vault_name, filename,
upload_id, worker_queue, result_queue)
time.sleep(0.2)
@@ -231,11 +230,11 @@ class UploadWorkerThread(TransferThread):
def _process_chunk(self, work):
result = None
- for i in xrange(self._num_retries + 1):
+ for i in range(self._num_retries + 1):
try:
result = self._upload_chunk(work)
break
- except self._retry_exceptions, e:
+ except self._retry_exceptions as e:
log.error("Exception caught uploading part number %s for "
"vault %s, attempt: (%s / %s), filename: %s, "
"exception: %s, msg: %s",
@@ -306,7 +305,7 @@ class ConcurrentDownloader(ConcurrentTransferer):
self._start_download_threads(result_queue, worker_queue)
try:
self._wait_for_download_threads(filename, result_queue, total_parts)
- except DownloadArchiveError, e:
+ except DownloadArchiveError as e:
log.debug("An error occurred while downloading an archive: %s", e)
raise e
log.debug("Download completed.")
@@ -324,7 +323,7 @@ class ConcurrentDownloader(ConcurrentTransferer):
"""
hash_chunks = [None] * total_parts
with open(filename, "wb") as f:
- for _ in xrange(total_parts):
+ for _ in range(total_parts):
result = result_queue.get()
if isinstance(result, Exception):
log.debug("An error was found in the result queue, "
@@ -352,7 +351,7 @@ class ConcurrentDownloader(ConcurrentTransferer):
def _start_download_threads(self, result_queue, worker_queue):
log.debug("Starting threads.")
- for _ in xrange(self._num_threads):
+ for _ in range(self._num_threads):
thread = DownloadWorkerThread(self._job, worker_queue, result_queue)
time.sleep(0.2)
thread.start()
@@ -393,11 +392,11 @@ class DownloadWorkerThread(TransferThread):
:param work:
"""
result = None
- for _ in xrange(self._num_retries):
+ for _ in range(self._num_retries):
try:
result = self._download_chunk(work)
break
- except self._retry_exceptions, e:
+ except self._retry_exceptions as e:
log.error("Exception caught downloading part number %s for "
"job %s", work[0], self._job,)
time.sleep(self._time_between_retries)
diff --git a/awx/lib/site-packages/boto/glacier/job.py b/awx/lib/site-packages/boto/glacier/job.py
index d26d6b40dc..33e66a196c 100644
--- a/awx/lib/site-packages/boto/glacier/job.py
+++ b/awx/lib/site-packages/boto/glacier/job.py
@@ -20,12 +20,12 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
-from __future__ import with_statement
import math
import socket
-from .exceptions import TreeHashDoesNotMatchError, DownloadArchiveError
-from .utils import tree_hash_from_str
+from boto.glacier.exceptions import TreeHashDoesNotMatchError, \
+ DownloadArchiveError
+from boto.glacier.utils import tree_hash_from_str
class Job(object):
@@ -123,7 +123,7 @@ class Job(object):
verify_hashes, retry_exceptions)
def download_to_fileobj(self, output_file, chunk_size=DefaultPartSize,
- verify_hashes=True,
+ verify_hashes=True,
retry_exceptions=(socket.error,)):
"""Download an archive to a file object.
@@ -146,7 +146,7 @@ class Job(object):
def _download_to_fileob(self, fileobj, num_chunks, chunk_size, verify_hashes,
retry_exceptions):
- for i in xrange(num_chunks):
+ for i in range(num_chunks):
byte_range = ((i * chunk_size), ((i + 1) * chunk_size) - 1)
data, expected_tree_hash = self._download_byte_range(
byte_range, retry_exceptions)
@@ -163,13 +163,13 @@ class Job(object):
# You can occasionally get socket.errors when downloading
# chunks from Glacier, so each chunk can be retried up
# to 5 times.
- for _ in xrange(5):
+ for _ in range(5):
try:
response = self.get_output(byte_range)
data = response.read()
expected_tree_hash = response['TreeHash']
return data, expected_tree_hash
- except retry_exceptions, e:
+ except retry_exceptions as e:
continue
else:
raise DownloadArchiveError("There was an error downloading"
diff --git a/awx/lib/site-packages/boto/glacier/layer1.py b/awx/lib/site-packages/boto/glacier/layer1.py
index f46fbf0971..39136cf03f 100644
--- a/awx/lib/site-packages/boto/glacier/layer1.py
+++ b/awx/lib/site-packages/boto/glacier/layer1.py
@@ -27,9 +27,9 @@ import os
import boto.glacier
from boto.compat import json
from boto.connection import AWSAuthConnection
-from .exceptions import UnexpectedHTTPResponseError
-from .response import GlacierResponse
-from .utils import ResettingFileSender
+from boto.glacier.exceptions import UnexpectedHTTPResponseError
+from boto.glacier.response import GlacierResponse
+from boto.glacier.utils import ResettingFileSender
class Layer1(AWSAuthConnection):
@@ -89,12 +89,13 @@ class Layer1(AWSAuthConnection):
self.region = region
self.account_id = account_id
super(Layer1, self).__init__(region.endpoint,
- aws_access_key_id, aws_secret_access_key,
- is_secure, port, proxy, proxy_port,
- proxy_user, proxy_pass, debug,
- https_connection_factory,
- path, provider, security_token,
- suppress_consec_slashes, profile_name=profile_name)
+ aws_access_key_id, aws_secret_access_key,
+ is_secure, port, proxy, proxy_port,
+ proxy_user, proxy_pass, debug,
+ https_connection_factory,
+ path, provider, security_token,
+ suppress_consec_slashes,
+ profile_name=profile_name)
def _required_auth_capability(self):
return ['hmac-v4']
@@ -107,10 +108,10 @@ class Layer1(AWSAuthConnection):
headers['x-amz-glacier-version'] = self.Version
uri = '/%s/%s' % (self.account_id, resource)
response = super(Layer1, self).make_request(verb, uri,
- params=params,
- headers=headers,
- sender=sender,
- data=data)
+ params=params,
+ headers=headers,
+ sender=sender,
+ data=data)
if response.status in ok_responses:
return GlacierResponse(response, response_headers)
else:
@@ -826,9 +827,9 @@ class Layer1(AWSAuthConnection):
else:
sender = None
return self.make_request('POST', uri, headers=headers,
- sender=sender,
- data=archive, ok_responses=(201,),
- response_headers=response_headers)
+ sender=sender,
+ data=archive, ok_responses=(201,),
+ response_headers=response_headers)
def _is_file_like(self, archive):
return hasattr(archive, 'seek') and hasattr(archive, 'tell')
diff --git a/awx/lib/site-packages/boto/glacier/layer2.py b/awx/lib/site-packages/boto/glacier/layer2.py
index d27f62d19d..abc36199bb 100644
--- a/awx/lib/site-packages/boto/glacier/layer2.py
+++ b/awx/lib/site-packages/boto/glacier/layer2.py
@@ -21,8 +21,8 @@
# IN THE SOFTWARE.
#
-from .layer1 import Layer1
-from .vault import Vault
+from boto.glacier.layer1 import Layer1
+from boto.glacier.vault import Vault
class Layer2(object):
diff --git a/awx/lib/site-packages/boto/glacier/response.py b/awx/lib/site-packages/boto/glacier/response.py
index 78d9f5f935..a67ec61d5f 100644
--- a/awx/lib/site-packages/boto/glacier/response.py
+++ b/awx/lib/site-packages/boto/glacier/response.py
@@ -37,7 +37,7 @@ class GlacierResponse(dict):
for header_name, item_name in response_headers:
self[item_name] = http_response.getheader(header_name)
if http_response.getheader('Content-Type') == 'application/json':
- body = json.loads(http_response.read())
+ body = json.loads(http_response.read().decode('utf-8'))
self.update(body)
size = http_response.getheader('Content-Length', None)
if size is not None:
diff --git a/awx/lib/site-packages/boto/glacier/utils.py b/awx/lib/site-packages/boto/glacier/utils.py
index af779f5cc3..98847e3f16 100644
--- a/awx/lib/site-packages/boto/glacier/utils.py
+++ b/awx/lib/site-packages/boto/glacier/utils.py
@@ -21,6 +21,9 @@
#
import hashlib
import math
+import binascii
+
+from boto.compat import six
_MEGABYTE = 1024 * 1024
@@ -71,12 +74,12 @@ def minimum_part_size(size_in_bytes, default_part_size=DEFAULT_PART_SIZE):
def chunk_hashes(bytestring, chunk_size=_MEGABYTE):
chunk_count = int(math.ceil(len(bytestring) / float(chunk_size)))
hashes = []
- for i in xrange(chunk_count):
+ for i in range(chunk_count):
start = i * chunk_size
end = (i + 1) * chunk_size
hashes.append(hashlib.sha256(bytestring[start:end]).digest())
if not hashes:
- return [hashlib.sha256('').digest()]
+ return [hashlib.sha256(b'').digest()]
return hashes
@@ -121,20 +124,29 @@ def compute_hashes_from_fileobj(fileobj, chunk_size=1024 * 1024):
are returned in hex.
"""
+ # Python 3+, not binary
+ if six.PY3 and hasattr(fileobj, 'mode') and 'b' not in fileobj.mode:
+ raise ValueError('File-like object must be opened in binary mode!')
+
linear_hash = hashlib.sha256()
chunks = []
chunk = fileobj.read(chunk_size)
while chunk:
+ # It's possible to get a file-like object that has no mode (checked
+ # above) and returns something other than bytes (e.g. str). So here
+ # we try to catch that and encode to bytes.
+ if not isinstance(chunk, bytes):
+ chunk = chunk.encode(getattr(fileobj, 'encoding', '') or 'utf-8')
linear_hash.update(chunk)
chunks.append(hashlib.sha256(chunk).digest())
chunk = fileobj.read(chunk_size)
if not chunks:
- chunks = [hashlib.sha256('').digest()]
+ chunks = [hashlib.sha256(b'').digest()]
return linear_hash.hexdigest(), bytes_to_hex(tree_hash(chunks))
def bytes_to_hex(str_as_bytes):
- return ''.join(["%02x" % ord(x) for x in str_as_bytes]).strip()
+ return binascii.hexlify(str_as_bytes)
def tree_hash_from_str(str_as_bytes):
diff --git a/awx/lib/site-packages/boto/glacier/vault.py b/awx/lib/site-packages/boto/glacier/vault.py
index e7d4e27d24..45d276cadb 100644
--- a/awx/lib/site-packages/boto/glacier/vault.py
+++ b/awx/lib/site-packages/boto/glacier/vault.py
@@ -21,12 +21,13 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
-from __future__ import with_statement
-from .exceptions import UploadArchiveError
-from .job import Job
-from .writer import compute_hashes_from_fileobj, resume_file_upload, Writer
-from .concurrent import ConcurrentUploader
-from .utils import minimum_part_size, DEFAULT_PART_SIZE
+import codecs
+from boto.glacier.exceptions import UploadArchiveError
+from boto.glacier.job import Job
+from boto.glacier.writer import compute_hashes_from_fileobj, \
+ resume_file_upload, Writer
+from boto.glacier.concurrent import ConcurrentUploader
+from boto.glacier.utils import minimum_part_size, DEFAULT_PART_SIZE
import os.path
@@ -54,8 +55,6 @@ class Vault(object):
if response_data:
for response_name, attr_name, default in self.ResponseDataElements:
value = response_data[response_name]
- if isinstance(value, unicode):
- value = value.encode('utf8')
setattr(self, attr_name, value)
else:
for response_name, attr_name, default in self.ResponseDataElements:
@@ -227,7 +226,7 @@ class Vault(object):
for part_desc in part_list_response['Parts']:
part_index = self._range_string_to_part_index(
part_desc['RangeInBytes'], part_size)
- part_tree_hash = part_desc['SHA256TreeHash'].decode('hex')
+ part_tree_hash = codecs.decode(part_desc['SHA256TreeHash'], 'hex_codec')
part_hash_map[part_index] = part_tree_hash
if not file_obj:
@@ -343,9 +342,9 @@ class Vault(object):
rparams = {}
if start_date is not None:
- rparams['StartDate'] = start_date.isoformat()
+ rparams['StartDate'] = start_date.strftime('%Y-%m-%dT%H:%M:%S%Z')
if end_date is not None:
- rparams['EndDate'] = end_date.isoformat()
+ rparams['EndDate'] = end_date.strftime('%Y-%m-%dT%H:%M:%S%Z')
if limit is not None:
rparams['Limit'] = limit
diff --git a/awx/lib/site-packages/boto/glacier/writer.py b/awx/lib/site-packages/boto/glacier/writer.py
index ad0ab265ba..fa3161ab3c 100644
--- a/awx/lib/site-packages/boto/glacier/writer.py
+++ b/awx/lib/site-packages/boto/glacier/writer.py
@@ -53,7 +53,7 @@ class _Partitioner(object):
self._buffer_size = 0
def write(self, data):
- if data == '':
+ if data == b'':
return
self._buffer.append(data)
self._buffer_size += len(data)
@@ -61,7 +61,7 @@ class _Partitioner(object):
self._send_part()
def _send_part(self):
- data = ''.join(self._buffer)
+ data = b''.join(self._buffer)
# Put back any data remaining over the part size into the
# buffer
if len(data) > self.part_size:
@@ -164,7 +164,7 @@ class _Uploader(object):
def generate_parts_from_fobj(fobj, part_size):
data = fobj.read(part_size)
while data:
- yield data
+ yield data.encode('utf-8')
data = fobj.read(part_size)
diff --git a/awx/lib/site-packages/boto/gs/bucket.py b/awx/lib/site-packages/boto/gs/bucket.py
index 3b706408b8..37636fb876 100644
--- a/awx/lib/site-packages/boto/gs/bucket.py
+++ b/awx/lib/site-packages/boto/gs/bucket.py
@@ -37,6 +37,7 @@ from boto.gs.key import Key as GSKey
from boto.s3.acl import Policy
from boto.s3.bucket import Bucket as S3Bucket
from boto.utils import get_utf8_value
+from boto.compat import six
# constants for http query args
DEF_OBJ_ACL = 'defaultObjectAcl'
@@ -100,12 +101,12 @@ class Bucket(S3Bucket):
if generation:
query_args_l.append('generation=%s' % generation)
if response_headers:
- for rk, rv in response_headers.iteritems():
+ for rk, rv in six.iteritems(response_headers):
query_args_l.append('%s=%s' % (rk, urllib.quote(rv)))
try:
key, resp = self._get_key_internal(key_name, headers,
query_args_l=query_args_l)
- except GSResponseError, e:
+ except GSResponseError as e:
if e.status == 403 and 'Forbidden' in e.reason:
# If we failed getting an object, let the user know which object
# failed rather than just returning a generic 403.
diff --git a/awx/lib/site-packages/boto/gs/cors.py b/awx/lib/site-packages/boto/gs/cors.py
index e5dd918414..1c5cfd0c7b 100644
--- a/awx/lib/site-packages/boto/gs/cors.py
+++ b/awx/lib/site-packages/boto/gs/cors.py
@@ -156,7 +156,7 @@ class Cors(handler.ContentHandler):
s += '<' + collection + '>'
# If collection elements has type string, append atomic value,
# otherwise, append sequence of values in named tags.
- if isinstance(elements_or_value, types.StringTypes):
+ if isinstance(elements_or_value, str):
s += elements_or_value
else:
for (name, value) in elements_or_value:
diff --git a/awx/lib/site-packages/boto/gs/key.py b/awx/lib/site-packages/boto/gs/key.py
index 277e7c7150..c4fcf01f09 100644
--- a/awx/lib/site-packages/boto/gs/key.py
+++ b/awx/lib/site-packages/boto/gs/key.py
@@ -23,7 +23,8 @@ import base64
import binascii
import os
import re
-import StringIO
+
+from boto.compat import StringIO
from boto.exception import BotoClientError
from boto.s3.key import Key as S3Key
from boto.s3.keyfile import KeyFile
@@ -410,19 +411,20 @@ class Key(S3Key):
contents.
:type fp: file
- :param fp: the file whose contents are to be uploaded
+ :param fp: The file whose contents are to be uploaded.
:type headers: dict
- :param headers: additional HTTP headers to be sent with the PUT request.
+ :param headers: (optional) Additional HTTP headers to be sent with the
+ PUT request.
:type replace: bool
- :param replace: If this parameter is False, the method will first check
- to see if an object exists in the bucket with the same key. If it
- does, it won't overwrite it. The default value is True which will
- overwrite the object.
+ :param replace: (optional) If this parameter is False, the method will
+ first check to see if an object exists in the bucket with the same
+ key. If it does, it won't overwrite it. The default value is True
+ which will overwrite the object.
:type cb: function
- :param cb: a callback function that will be called to report
+ :param cb: (optional) Callback function that will be called to report
progress on the upload. The callback should accept two integer
parameters, the first representing the number of bytes that have
been successfully transmitted to GS and the second representing the
@@ -435,43 +437,44 @@ class Key(S3Key):
during the file transfer.
:type policy: :class:`boto.gs.acl.CannedACLStrings`
- :param policy: A canned ACL policy that will be applied to the new key
- in GS.
+ :param policy: (optional) A canned ACL policy that will be applied to
+ the new key in GS.
- :type md5: A tuple containing the hexdigest version of the MD5 checksum
- of the file as the first element and the Base64-encoded version of
- the plain checksum as the second element. This is the same format
- returned by the compute_md5 method.
- :param md5: If you need to compute the MD5 for any reason prior to
- upload, it's silly to have to do it twice so this param, if present,
- will be used as the MD5 values of the file. Otherwise, the checksum
- will be computed.
+ :type md5: tuple
+ :param md5: (optional) A tuple containing the hexdigest version of the
+ MD5 checksum of the file as the first element and the
+ Base64-encoded version of the plain checksum as the second element.
+ This is the same format returned by the compute_md5 method.
- :type res_upload_handler: ResumableUploadHandler
- :param res_upload_handler: If provided, this handler will perform the
- upload.
+ If you need to compute the MD5 for any reason prior to upload, it's
+ silly to have to do it twice so this param, if present, will be
+ used as the MD5 values of the file. Otherwise, the checksum will be
+ computed.
+
+ :type res_upload_handler: :py:class:`boto.gs.resumable_upload_handler.ResumableUploadHandler`
+ :param res_upload_handler: (optional) If provided, this handler will
+ perform the upload.
:type size: int
- :param size: (optional) The Maximum number of bytes to read from
- the file pointer (fp). This is useful when uploading
- a file in multiple parts where you are splitting the
- file up into different ranges to be uploaded. If not
- specified, the default behaviour is to read all bytes
- from the file pointer. Less bytes may be available.
+ :param size: (optional) The Maximum number of bytes to read from the
+ file pointer (fp). This is useful when uploading a file in multiple
+ parts where you are splitting the file up into different ranges to
+ be uploaded. If not specified, the default behaviour is to read all
+ bytes from the file pointer. Less bytes may be available.
+
Notes:
- 1. The "size" parameter currently cannot be used when
- a resumable upload handler is given but is still
- useful for uploading part of a file as implemented
- by the parent class.
- 2. At present Google Cloud Storage does not support
- multipart uploads.
+ 1. The "size" parameter currently cannot be used when a
+ resumable upload handler is given but is still useful for
+ uploading part of a file as implemented by the parent class.
+ 2. At present Google Cloud Storage does not support multipart
+ uploads.
:type rewind: bool
- :param rewind: (optional) If True, the file pointer (fp) will be
- rewound to the start before any bytes are read from
- it. The default behaviour is False which reads from
- the current position of the file pointer (fp).
+ :param rewind: (optional) If True, the file pointer (fp) will be
+ rewound to the start before any bytes are read from it. The default
+ behaviour is False which reads from the current position of the
+ file pointer (fp).
:type if_generation: int
:param if_generation: (optional) If set to a generation number, the
@@ -588,44 +591,47 @@ class Key(S3Key):
parameters.
:type filename: string
- :param filename: The name of the file that you want to put onto GS
+ :param filename: The name of the file that you want to put onto GS.
:type headers: dict
- :param headers: Additional headers to pass along with the request to GS.
+ :param headers: (optional) Additional headers to pass along with the
+ request to GS.
:type replace: bool
- :param replace: If True, replaces the contents of the file if it
- already exists.
+ :param replace: (optional) If True, replaces the contents of the file
+ if it already exists.
:type cb: function
- :param cb: (optional) a callback function that will be called to report
- progress on the download. The callback should accept two integer
+ :param cb: (optional) Callback function that will be called to report
+ progress on the upload. The callback should accept two integer
parameters, the first representing the number of bytes that have
- been successfully transmitted from GS and the second representing
- the total number of bytes that need to be transmitted.
+ been successfully transmitted to GS and the second representing the
+ total number of bytes that need to be transmitted.
- :type cb: int
+ :type num_cb: int
:param num_cb: (optional) If a callback is specified with the cb
parameter this parameter determines the granularity of the callback
by defining the maximum number of times the callback will be called
during the file transfer.
- :type policy: :class:`boto.gs.acl.CannedACLStrings`
- :param policy: A canned ACL policy that will be applied to the new key
- in GS.
+ :type policy: :py:attribute:`boto.gs.acl.CannedACLStrings`
+ :param policy: (optional) A canned ACL policy that will be applied to
+ the new key in GS.
- :type md5: A tuple containing the hexdigest version of the MD5 checksum
- of the file as the first element and the Base64-encoded version of
- the plain checksum as the second element. This is the same format
- returned by the compute_md5 method.
- :param md5: If you need to compute the MD5 for any reason prior to
- upload, it's silly to have to do it twice so this param, if present,
- will be used as the MD5 values of the file. Otherwise, the checksum
- will be computed.
+ :type md5: tuple
+ :param md5: (optional) A tuple containing the hexdigest version of the
+ MD5 checksum of the file as the first element and the
+ Base64-encoded version of the plain checksum as the second element.
+ This is the same format returned by the compute_md5 method.
- :type res_upload_handler: ResumableUploadHandler
- :param res_upload_handler: If provided, this handler will perform the
- upload.
+ If you need to compute the MD5 for any reason prior to upload, it's
+ silly to have to do it twice so this param, if present, will be
+ used as the MD5 values of the file. Otherwise, the checksum will be
+ computed.
+
+ :type res_upload_handler: :py:class:`boto.gs.resumable_upload_handler.ResumableUploadHandler`
+ :param res_upload_handler: (optional) If provided, this handler will
+ perform the upload.
:type if_generation: int
:param if_generation: (optional) If set to a generation number, the
@@ -699,7 +705,7 @@ class Key(S3Key):
self.md5 = None
self.base64md5 = None
- fp = StringIO.StringIO(get_utf8_value(s))
+ fp = StringIO(get_utf8_value(s))
r = self.set_contents_from_file(fp, headers, replace, cb, num_cb,
policy, md5,
if_generation=if_generation)
diff --git a/awx/lib/site-packages/boto/gs/resumable_upload_handler.py b/awx/lib/site-packages/boto/gs/resumable_upload_handler.py
index d3835e3a2b..d74434693d 100644
--- a/awx/lib/site-packages/boto/gs/resumable_upload_handler.py
+++ b/awx/lib/site-packages/boto/gs/resumable_upload_handler.py
@@ -18,7 +18,6 @@
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-
import errno
import httplib
import os
@@ -27,16 +26,13 @@ import re
import socket
import time
import urlparse
+from hashlib import md5
from boto import config, UserAgent
from boto.connection import AWSAuthConnection
from boto.exception import InvalidUriError
from boto.exception import ResumableTransferDisposition
from boto.exception import ResumableUploadException
from boto.s3.keyfile import KeyFile
-try:
- from hashlib import md5
-except ImportError:
- from md5 import md5
"""
Handler for Google Cloud Storage resumable uploads. See
@@ -98,7 +94,7 @@ class ResumableUploadHandler(object):
f = open(self.tracker_file_name, 'r')
uri = f.readline().strip()
self._set_tracker_uri(uri)
- except IOError, e:
+ except IOError as e:
# Ignore non-existent file (happens first time an upload
# is attempted on a file), but warn user for other errors.
if e.errno != errno.ENOENT:
@@ -106,7 +102,7 @@ class ResumableUploadHandler(object):
print('Couldn\'t read URI tracker file (%s): %s. Restarting '
'upload from scratch.' %
(self.tracker_file_name, e.strerror))
- except InvalidUriError, e:
+ except InvalidUriError as e:
# Warn user, but proceed (will restart because
# self.tracker_uri is None).
print('Invalid tracker URI (%s) found in URI tracker file '
@@ -125,9 +121,9 @@ class ResumableUploadHandler(object):
f = None
try:
with os.fdopen(os.open(self.tracker_file_name,
- os.O_WRONLY | os.O_CREAT, 0600), 'w') as f:
+ os.O_WRONLY | os.O_CREAT, 0o600), 'w') as f:
f.write(self.tracker_uri)
- except IOError, e:
+ except IOError as e:
raise ResumableUploadException(
'Couldn\'t write URI tracker file (%s): %s.\nThis can happen'
'if you\'re using an incorrectly configured upload tool\n'
@@ -256,7 +252,7 @@ class ResumableUploadHandler(object):
'Couldn\'t parse upload server state query response (%s)' %
str(resp.getheaders()), ResumableTransferDisposition.START_OVER)
if conn.debug >= 1:
- print 'Server has: Range: %d - %d.' % (server_start, server_end)
+ print('Server has: Range: %d - %d.' % (server_start, server_end))
return (server_start, server_end)
def _start_new_resumable_upload(self, key, headers=None):
@@ -267,7 +263,7 @@ class ResumableUploadHandler(object):
"""
conn = key.bucket.connection
if conn.debug >= 1:
- print 'Starting new resumable upload.'
+ print('Starting new resumable upload.')
self.server_has_bytes = 0
# Start a new resumable upload by sending a POST request with an
@@ -433,7 +429,7 @@ class ResumableUploadHandler(object):
# If the server already has some of the content, we need to
# update the digesters with the bytes that have already been
# uploaded to ensure we get a complete hash in the end.
- print 'Catching up hash digest(s) for resumed upload'
+ print('Catching up hash digest(s) for resumed upload')
fp.seek(0)
# Read local file's bytes through position server has. For
# example, if server has (0, 3) we want to read 3-0+1=4 bytes.
@@ -453,10 +449,10 @@ class ResumableUploadHandler(object):
bytes_to_go -= len(chunk)
if conn.debug >= 1:
- print 'Resuming transfer.'
- except ResumableUploadException, e:
+ print('Resuming transfer.')
+ except ResumableUploadException as e:
if conn.debug >= 1:
- print 'Unable to resume transfer (%s).' % e.message
+ print('Unable to resume transfer (%s).' % e.message)
self._start_new_resumable_upload(key, headers)
else:
self._start_new_resumable_upload(key, headers)
@@ -513,7 +509,7 @@ class ResumableUploadHandler(object):
change some of the file and not realize they have inconsistent data.
"""
if key.bucket.connection.debug >= 1:
- print 'Checking md5 against etag.'
+ print('Checking md5 against etag.')
if key.md5 != etag.strip('"\''):
# Call key.open_read() before attempting to delete the
# (incorrect-content) key, so we perform that request on a
@@ -567,7 +563,7 @@ class ResumableUploadHandler(object):
# Use binary exponential backoff to desynchronize client requests.
sleep_time_secs = random.random() * (2**self.progress_less_iterations)
if debug >= 1:
- print ('Got retryable failure (%d progress-less in a row).\n'
+ print('Got retryable failure (%d progress-less in a row).\n'
'Sleeping %3.1f seconds before re-trying' %
(self.progress_less_iterations, sleep_time_secs))
time.sleep(sleep_time_secs)
@@ -664,9 +660,9 @@ class ResumableUploadHandler(object):
self._check_final_md5(key, etag)
key.generation = self.generation
if debug >= 1:
- print 'Resumable upload complete.'
+ print('Resumable upload complete.')
return
- except self.RETRYABLE_EXCEPTIONS, e:
+ except self.RETRYABLE_EXCEPTIONS as e:
if debug >= 1:
print('Caught exception (%s)' % e.__repr__())
if isinstance(e, IOError) and e.errno == errno.EPIPE:
@@ -676,7 +672,7 @@ class ResumableUploadHandler(object):
# the upload (which will cause a new connection to be
# opened the next time an HTTP request is sent).
key.bucket.connection.connection.close()
- except ResumableUploadException, e:
+ except ResumableUploadException as e:
self.handle_resumable_upload_exception(e, debug)
self.track_progress_less_iterations(server_had_bytes_before_attempt,
diff --git a/awx/lib/site-packages/boto/handler.py b/awx/lib/site-packages/boto/handler.py
index f936ee8893..b079ada64d 100644
--- a/awx/lib/site-packages/boto/handler.py
+++ b/awx/lib/site-packages/boto/handler.py
@@ -19,9 +19,10 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-import StringIO
import xml.sax
+from boto.compat import StringIO
+
class XmlHandler(xml.sax.ContentHandler):
def __init__(self, root_node, connection):
@@ -55,4 +56,4 @@ class XmlHandlerWrapper(object):
self.parser.setFeature(xml.sax.handler.feature_external_ges, 0)
def parseString(self, content):
- return self.parser.parse(StringIO.StringIO(content))
+ return self.parser.parse(StringIO(content))
diff --git a/awx/lib/site-packages/boto/https_connection.py b/awx/lib/site-packages/boto/https_connection.py
index 147119531f..9222fbde00 100644
--- a/awx/lib/site-packages/boto/https_connection.py
+++ b/awx/lib/site-packages/boto/https_connection.py
@@ -19,14 +19,15 @@
"""Extensions to allow HTTPS requests with SSL certificate validation."""
-import httplib
import re
import socket
import ssl
import boto
-class InvalidCertificateException(httplib.HTTPException):
+from boto.compat import six, http_client
+
+class InvalidCertificateException(http_client.HTTPException):
"""Raised when a certificate is provided with an invalid hostname."""
def __init__(self, host, cert, reason):
@@ -36,7 +37,7 @@ class InvalidCertificateException(httplib.HTTPException):
host: The hostname the connection was made to.
cert: The SSL certificate (as a dictionary) the host returned.
"""
- httplib.HTTPException.__init__(self)
+ http_client.HTTPException.__init__(self)
self.host = host
self.cert = cert
self.reason = reason
@@ -79,10 +80,10 @@ def ValidateCertificateHostname(cert, hostname):
return False
-class CertValidatingHTTPSConnection(httplib.HTTPConnection):
+class CertValidatingHTTPSConnection(http_client.HTTPConnection):
"""An HTTPConnection that connects over SSL and validates certificates."""
- default_port = httplib.HTTPS_PORT
+ default_port = http_client.HTTPS_PORT
def __init__(self, host, port=default_port, key_file=None, cert_file=None,
ca_certs=None, strict=None, **kwargs):
@@ -98,17 +99,23 @@ class CertValidatingHTTPSConnection(httplib.HTTPConnection):
strict: When true, causes BadStatusLine to be raised if the status line
can't be parsed as a valid HTTP/1.0 or 1.1 status line.
"""
- httplib.HTTPConnection.__init__(self, host, port, strict, **kwargs)
+ if six.PY2:
+ # Python 3.2 and newer have deprecated and removed the strict
+ # parameter. Since the params are supported as keyword arguments
+ # we conditionally add it here.
+ kwargs['strict'] = strict
+
+ http_client.HTTPConnection.__init__(self, host=host, port=port, **kwargs)
self.key_file = key_file
self.cert_file = cert_file
self.ca_certs = ca_certs
def connect(self):
"Connect to a host on a given (SSL) port."
- sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- if hasattr(self, "timeout") and self.timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
- sock.settimeout(self.timeout)
- sock.connect((self.host, self.port))
+ if hasattr(self, "timeout"):
+ sock = socket.create_connection((self.host, self.port), self.timeout)
+ else:
+ sock = socket.create_connection((self.host, self.port))
msg = "wrapping ssl socket; "
if self.ca_certs:
msg += "CA certificate file=%s" %self.ca_certs
diff --git a/awx/lib/site-packages/boto/iam/connection.py b/awx/lib/site-packages/boto/iam/connection.py
index 4872b27493..da242c2c69 100644
--- a/awx/lib/site-packages/boto/iam/connection.py
+++ b/awx/lib/site-packages/boto/iam/connection.py
@@ -21,16 +21,37 @@
# IN THE SOFTWARE.
import boto
import boto.jsonresponse
-from boto.compat import json
+from boto.compat import json, six
from boto.resultset import ResultSet
from boto.iam.summarymap import SummaryMap
from boto.connection import AWSQueryConnection
-
-ASSUME_ROLE_POLICY_DOCUMENT = json.dumps({
- 'Statement': [{'Principal': {'Service': ['ec2.amazonaws.com']},
- 'Effect': 'Allow',
- 'Action': ['sts:AssumeRole']}]})
+DEFAULT_POLICY_DOCUMENTS = {
+ 'default': {
+ 'Statement': [
+ {
+ 'Principal': {
+ 'Service': ['ec2.amazonaws.com']
+ },
+ 'Effect': 'Allow',
+ 'Action': ['sts:AssumeRole']
+ }
+ ]
+ },
+ 'amazonaws.com.cn': {
+ 'Statement': [
+ {
+ 'Principal': {
+ 'Service': ['ec2.amazonaws.com.cn']
+ },
+ 'Effect': 'Allow',
+ 'Action': ['sts:AssumeRole']
+ }
+ ]
+ },
+}
+# For backward-compatibility, we'll preserve this here.
+ASSUME_ROLE_POLICY_DOCUMENT = json.dumps(DEFAULT_POLICY_DOCUMENTS['default'])
class IAMConnection(AWSQueryConnection):
@@ -40,7 +61,7 @@ class IAMConnection(AWSQueryConnection):
def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
is_secure=True, port=None, proxy=None, proxy_port=None,
proxy_user=None, proxy_pass=None, host='iam.amazonaws.com',
- debug=0, https_connection_factory=None, path='/',
+ debug=0, https_connection_factory=None, path='/',
security_token=None, validate_certs=True, profile_name=None):
super(IAMConnection, self).__init__(aws_access_key_id,
aws_secret_access_key,
@@ -1006,13 +1027,35 @@ class IAMConnection(AWSQueryConnection):
:param service: Default service to go to in the console.
"""
alias = self.get_account_alias()
+
if not alias:
raise Exception('No alias associated with this account. Please use iam.create_account_alias() first.')
+ resp = alias.get('list_account_aliases_response', {})
+ result = resp.get('list_account_aliases_result', {})
+ aliases = result.get('account_aliases', [])
+
+ if not len(aliases):
+ raise Exception('No alias associated with this account. Please use iam.create_account_alias() first.')
+
+ # We'll just use the first one we find.
+ alias = aliases[0]
+
if self.host == 'iam.us-gov.amazonaws.com':
- return "https://%s.signin.amazonaws-us-gov.com/console/%s" % (alias, service)
+ return "https://%s.signin.amazonaws-us-gov.com/console/%s" % (
+ alias,
+ service
+ )
+ elif self.host.endswith('amazonaws.com.cn'):
+ return "https://%s.signin.amazonaws.cn/console/%s" % (
+ alias,
+ service
+ )
else:
- return "https://%s.signin.aws.amazon.com/console/%s" % (alias, service)
+ return "https://%s.signin.aws.amazon.com/console/%s" % (
+ alias,
+ service
+ )
def get_account_summary(self):
"""
@@ -1059,6 +1102,30 @@ class IAMConnection(AWSQueryConnection):
params['Path'] = path
return self.get_response('CreateInstanceProfile', params)
+ def _build_policy(self, assume_role_policy_document=None):
+ if assume_role_policy_document is not None:
+ if isinstance(assume_role_policy_document, six.string_types):
+ # Historically, they had to pass a string. If it's a string,
+ # assume the user has already handled it.
+ return assume_role_policy_document
+ else:
+
+ for tld, policy in DEFAULT_POLICY_DOCUMENTS.items():
+ if tld == 'default':
+ # Skip the default. We'll fall back to it if we don't find
+ # anything.
+ continue
+
+ if self.host and self.host.endswith(tld):
+ assume_role_policy_document = policy
+ break
+
+ if not assume_role_policy_document:
+ assume_role_policy_document = DEFAULT_POLICY_DOCUMENTS['default']
+
+ # Dump the policy (either user-supplied ``dict`` or one of the defaults)
+ return json.dumps(assume_role_policy_document)
+
def create_role(self, role_name, assume_role_policy_document=None, path=None):
"""
Creates a new role for your AWS account.
@@ -1070,21 +1137,19 @@ class IAMConnection(AWSQueryConnection):
:type role_name: string
:param role_name: Name of the role to create.
- :type assume_role_policy_document: string
+ :type assume_role_policy_document: ``string`` or ``dict``
:param assume_role_policy_document: The policy that grants an entity
permission to assume the role.
:type path: string
- :param path: The path to the instance profile.
+ :param path: The path to the role.
"""
- params = {'RoleName': role_name}
- if assume_role_policy_document is None:
- # This is the only valid assume_role_policy_document currently, so
- # this is used as a default value if no assume_role_policy_document
- # is provided.
- params['AssumeRolePolicyDocument'] = ASSUME_ROLE_POLICY_DOCUMENT
- else:
- params['AssumeRolePolicyDocument'] = assume_role_policy_document
+ params = {
+ 'RoleName': role_name,
+ 'AssumeRolePolicyDocument': self._build_policy(
+ assume_role_policy_document
+ ),
+ }
if path is not None:
params['Path'] = path
return self.get_response('CreateRole', params)
@@ -1375,7 +1440,7 @@ class IAMConnection(AWSQueryConnection):
Lists the SAML providers in the account.
This operation requires `Signature Version 4`_.
"""
- return self.get_response('ListSAMLProviders', {})
+ return self.get_response('ListSAMLProviders', {}, list_marker='SAMLProviderList')
def get_saml_provider(self, saml_provider_arn):
"""
diff --git a/awx/lib/site-packages/boto/jsonresponse.py b/awx/lib/site-packages/boto/jsonresponse.py
index 5dab5af991..ac3f1b4a81 100644
--- a/awx/lib/site-packages/boto/jsonresponse.py
+++ b/awx/lib/site-packages/boto/jsonresponse.py
@@ -21,7 +21,7 @@
# IN THE SOFTWARE.
import xml.sax
-import utils
+from boto import utils
class XmlHandler(xml.sax.ContentHandler):
@@ -49,6 +49,8 @@ class XmlHandler(xml.sax.ContentHandler):
self.current_text += content
def parse(self, s):
+ if not isinstance(s, bytes):
+ s = s.encode('utf-8')
xml.sax.parseString(s, self)
class Element(dict):
diff --git a/awx/lib/site-packages/boto/kinesis/layer1.py b/awx/lib/site-packages/boto/kinesis/layer1.py
index ede695bed6..d514b06485 100644
--- a/awx/lib/site-packages/boto/kinesis/layer1.py
+++ b/awx/lib/site-packages/boto/kinesis/layer1.py
@@ -20,11 +20,6 @@
# IN THE SOFTWARE.
#
-try:
- import json
-except ImportError:
- import simplejson as json
-
import base64
import boto
@@ -32,6 +27,7 @@ from boto.connection import AWSQueryConnection
from boto.regioninfo import RegionInfo
from boto.exception import JSONResponseError
from boto.kinesis import exceptions
+from boto.compat import json
class KinesisConnection(AWSQueryConnection):
@@ -293,7 +289,8 @@ class KinesisConnection(AWSQueryConnection):
# Base64 decode the data
if b64_decode:
for record in response.get('Records', []):
- record['Data'] = base64.b64decode(record['Data'])
+ record['Data'] = base64.b64decode(
+ record['Data'].encode('utf-8')).decode('utf-8')
return response
@@ -594,7 +591,8 @@ class KinesisConnection(AWSQueryConnection):
if sequence_number_for_ordering is not None:
params['SequenceNumberForOrdering'] = sequence_number_for_ordering
if b64_encode:
- params['Data'] = base64.b64encode(params['Data'])
+ params['Data'] = base64.b64encode(
+ params['Data'].encode('utf-8')).decode('utf-8')
return self.make_request(action='PutRecord',
body=json.dumps(params))
@@ -695,7 +693,7 @@ class KinesisConnection(AWSQueryConnection):
headers=headers, data=body)
response = self._mexe(http_request, sender=None,
override_num_retries=10)
- response_body = response.read()
+ response_body = response.read().decode('utf-8')
boto.log.debug(response.getheaders())
boto.log.debug(response_body)
if response.status == 200:
diff --git a/awx/lib/site-packages/boto/logs/__init__.py b/awx/lib/site-packages/boto/logs/__init__.py
new file mode 100644
index 0000000000..2ea075d1ea
--- /dev/null
+++ b/awx/lib/site-packages/boto/logs/__init__.py
@@ -0,0 +1,41 @@
+# Copyright (c) 2014 Amazon.com, Inc. or its affiliates.
+# All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+from boto.regioninfo import get_regions
+
+
+def regions():
+ """
+ Get all available regions for the CloudWatch Logs service.
+
+ :rtype: list
+ :return: A list of :class:`boto.regioninfo.RegionInfo`
+ """
+ from boto.logs.layer1 import CloudWatchLogsConnection
+ return get_regions('logs', connection_cls=CloudWatchLogsConnection)
+
+
+def connect_to_region(region_name, **kw_params):
+ for region in regions():
+ if region.name == region_name:
+ return region.connect(**kw_params)
+ return None
diff --git a/awx/lib/site-packages/boto/logs/exceptions.py b/awx/lib/site-packages/boto/logs/exceptions.py
new file mode 100644
index 0000000000..49c01fa91c
--- /dev/null
+++ b/awx/lib/site-packages/boto/logs/exceptions.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2012 Thomas Parslow http://almostobsolete.net/
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+from boto.exception import BotoServerError
+
+
+class LimitExceededException(BotoServerError):
+ pass
+
+
+class DataAlreadyAcceptedException(BotoServerError):
+ pass
+
+
+class ResourceInUseException(BotoServerError):
+ pass
+
+
+class ServiceUnavailableException(BotoServerError):
+ pass
+
+
+class InvalidParameterException(BotoServerError):
+ pass
+
+
+class ResourceNotFoundException(BotoServerError):
+ pass
+
+
+class ResourceAlreadyExistsException(BotoServerError):
+ pass
+
+
+class OperationAbortedException(BotoServerError):
+ pass
+
+
+class InvalidSequenceTokenException(BotoServerError):
+ pass
diff --git a/awx/lib/site-packages/boto/logs/layer1.py b/awx/lib/site-packages/boto/logs/layer1.py
new file mode 100644
index 0000000000..254e28551d
--- /dev/null
+++ b/awx/lib/site-packages/boto/logs/layer1.py
@@ -0,0 +1,577 @@
+# Copyright (c) 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+
+import boto
+from boto.connection import AWSQueryConnection
+from boto.regioninfo import RegionInfo
+from boto.exception import JSONResponseError
+from boto.logs import exceptions
+from boto.compat import json
+
+
+class CloudWatchLogsConnection(AWSQueryConnection):
+ """
+ Amazon CloudWatch Logs Service API Reference
+ This is the Amazon CloudWatch Logs API Reference . Amazon
+ CloudWatch Logs is a managed service for real time monitoring and
+ archival of application logs. This guide provides detailed
+ information about Amazon CloudWatch Logs actions, data types,
+ parameters, and errors. For detailed information about Amazon
+ CloudWatch Logs features and their associated API calls, go to the
+ `Amazon CloudWatch Logs Developer Guide`_.
+
+ Use the following links to get started using the Amazon CloudWatch
+ API Reference :
+
+
+ + `Actions`_: An alphabetical list of all Amazon CloudWatch Logs
+ actions.
+ + `Data Types`_: An alphabetical list of all Amazon CloudWatch
+ Logs data types.
+ + `Common Parameters`_: Parameters that all Query actions can use.
+ + `Common Errors`_: Client and server errors that all actions can
+ return.
+ + `Regions and Endpoints`_: Itemized regions and endpoints for all
+ AWS products.
+
+
+ In addition to using the Amazon CloudWatch Logs API, you can also
+ use the following SDKs and third-party libraries to access Amazon
+ CloudWatch Logs programmatically.
+
+
+ + `AWS SDK for Java Documentation`_
+ + `AWS SDK for .NET Documentation`_
+ + `AWS SDK for PHP Documentation`_
+ + `AWS SDK for Ruby Documentation`_
+
+
+ Developers in the AWS developer community also provide their own
+ libraries, which you can find at the following AWS developer
+ centers:
+
+
+ + `AWS Java Developer Center`_
+ + `AWS PHP Developer Center`_
+ + `AWS Python Developer Center`_
+ + `AWS Ruby Developer Center`_
+ + `AWS Windows and .NET Developer Center`_
+ """
+ APIVersion = "2014-03-28"
+ DefaultRegionName = "us-east-1"
+ DefaultRegionEndpoint = "logs.us-east-1.amazonaws.com"
+ ServiceName = "CloudWatchLogs"
+ TargetPrefix = "Logs_20140328"
+ ResponseError = JSONResponseError
+
+ _faults = {
+ "LimitExceededException": exceptions.LimitExceededException,
+ "DataAlreadyAcceptedException": exceptions.DataAlreadyAcceptedException,
+ "ResourceInUseException": exceptions.ResourceInUseException,
+ "ServiceUnavailableException": exceptions.ServiceUnavailableException,
+ "InvalidParameterException": exceptions.InvalidParameterException,
+ "ResourceNotFoundException": exceptions.ResourceNotFoundException,
+ "ResourceAlreadyExistsException": exceptions.ResourceAlreadyExistsException,
+ "OperationAbortedException": exceptions.OperationAbortedException,
+ "InvalidSequenceTokenException": exceptions.InvalidSequenceTokenException,
+ }
+
+
+ def __init__(self, **kwargs):
+ region = kwargs.pop('region', None)
+ if not region:
+ region = RegionInfo(self, self.DefaultRegionName,
+ self.DefaultRegionEndpoint)
+
+ if 'host' not in kwargs or kwargs['host'] is None:
+ kwargs['host'] = region.endpoint
+
+ super(CloudWatchLogsConnection, self).__init__(**kwargs)
+ self.region = region
+
+ def _required_auth_capability(self):
+ return ['hmac-v4']
+
+ def create_log_group(self, log_group_name):
+ """
+ Creates a new log group with the specified name. The name of
+ the log group must be unique within a region for an AWS
+ account. You can create up to 100 log groups per account.
+
+ You must use the following guidelines when naming a log group:
+
+ + Log group names can be between 1 and 512 characters long.
+ + Allowed characters are a-z, A-Z, 0-9, '_' (underscore), '-'
+ (hyphen), '/' (forward slash), and '.' (period).
+
+
+
+ Log groups are created with a default retention of 14 days.
+ The retention attribute allow you to configure the number of
+ days you want to retain log events in the specified log group.
+ See the `SetRetention` operation on how to modify the
+ retention of your log groups.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ """
+ params = {'logGroupName': log_group_name, }
+ return self.make_request(action='CreateLogGroup',
+ body=json.dumps(params))
+
+ def create_log_stream(self, log_group_name, log_stream_name):
+ """
+ Creates a new log stream in the specified log group. The name
+ of the log stream must be unique within the log group. There
+ is no limit on the number of log streams that can exist in a
+ log group.
+
+ You must use the following guidelines when naming a log
+ stream:
+
+ + Log stream names can be between 1 and 512 characters long.
+ + The ':' colon character is not allowed.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type log_stream_name: string
+ :param log_stream_name:
+
+ """
+ params = {
+ 'logGroupName': log_group_name,
+ 'logStreamName': log_stream_name,
+ }
+ return self.make_request(action='CreateLogStream',
+ body=json.dumps(params))
+
+ def delete_log_group(self, log_group_name):
+ """
+ Deletes the log group with the specified name. Amazon
+ CloudWatch Logs will delete a log group only if there are no
+ log streams and no metric filters associated with the log
+ group. If this condition is not satisfied, the request will
+ fail and the log group will not be deleted.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ """
+ params = {'logGroupName': log_group_name, }
+ return self.make_request(action='DeleteLogGroup',
+ body=json.dumps(params))
+
+ def delete_log_stream(self, log_group_name, log_stream_name):
+ """
+ Deletes a log stream and permanently deletes all the archived
+ log events associated with it.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type log_stream_name: string
+ :param log_stream_name:
+
+ """
+ params = {
+ 'logGroupName': log_group_name,
+ 'logStreamName': log_stream_name,
+ }
+ return self.make_request(action='DeleteLogStream',
+ body=json.dumps(params))
+
+ def delete_metric_filter(self, log_group_name, filter_name):
+ """
+ Deletes a metric filter associated with the specified log
+ group.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type filter_name: string
+ :param filter_name: The name of the metric filter.
+
+ """
+ params = {
+ 'logGroupName': log_group_name,
+ 'filterName': filter_name,
+ }
+ return self.make_request(action='DeleteMetricFilter',
+ body=json.dumps(params))
+
+ def delete_retention_policy(self, log_group_name):
+ """
+
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ """
+ params = {'logGroupName': log_group_name, }
+ return self.make_request(action='DeleteRetentionPolicy',
+ body=json.dumps(params))
+
+ def describe_log_groups(self, log_group_name_prefix=None,
+ next_token=None, limit=None):
+ """
+ Returns all the log groups that are associated with the AWS
+ account making the request. The list returned in the response
+ is ASCII-sorted by log group name.
+
+ By default, this operation returns up to 50 log groups. If
+ there are more log groups to list, the response would contain
+ a `nextToken` value in the response body. You can also limit
+ the number of log groups returned in the response by
+ specifying the `limit` parameter in the request.
+
+ :type log_group_name_prefix: string
+ :param log_group_name_prefix:
+
+ :type next_token: string
+ :param next_token: A string token used for pagination that points to
+ the next page of results. It must be a value obtained from the
+ response of the previous `DescribeLogGroups` request.
+
+ :type limit: integer
+ :param limit: The maximum number of items returned in the response. If
+ you don't specify a value, the request would return up to 50 items.
+
+ """
+ params = {}
+ if log_group_name_prefix is not None:
+ params['logGroupNamePrefix'] = log_group_name_prefix
+ if next_token is not None:
+ params['nextToken'] = next_token
+ if limit is not None:
+ params['limit'] = limit
+ return self.make_request(action='DescribeLogGroups',
+ body=json.dumps(params))
+
+ def describe_log_streams(self, log_group_name,
+ log_stream_name_prefix=None, next_token=None,
+ limit=None):
+ """
+ Returns all the log streams that are associated with the
+ specified log group. The list returned in the response is
+ ASCII-sorted by log stream name.
+
+ By default, this operation returns up to 50 log streams. If
+ there are more log streams to list, the response would contain
+ a `nextToken` value in the response body. You can also limit
+ the number of log streams returned in the response by
+ specifying the `limit` parameter in the request.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type log_stream_name_prefix: string
+ :param log_stream_name_prefix:
+
+ :type next_token: string
+ :param next_token: A string token used for pagination that points to
+ the next page of results. It must be a value obtained from the
+ response of the previous `DescribeLogStreams` request.
+
+ :type limit: integer
+ :param limit: The maximum number of items returned in the response. If
+ you don't specify a value, the request would return up to 50 items.
+
+ """
+ params = {'logGroupName': log_group_name, }
+ if log_stream_name_prefix is not None:
+ params['logStreamNamePrefix'] = log_stream_name_prefix
+ if next_token is not None:
+ params['nextToken'] = next_token
+ if limit is not None:
+ params['limit'] = limit
+ return self.make_request(action='DescribeLogStreams',
+ body=json.dumps(params))
+
+ def describe_metric_filters(self, log_group_name,
+ filter_name_prefix=None, next_token=None,
+ limit=None):
+ """
+ Returns all the metrics filters associated with the specified
+ log group. The list returned in the response is ASCII-sorted
+ by filter name.
+
+ By default, this operation returns up to 50 metric filters. If
+ there are more metric filters to list, the response would
+ contain a `nextToken` value in the response body. You can also
+ limit the number of metric filters returned in the response by
+ specifying the `limit` parameter in the request.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type filter_name_prefix: string
+ :param filter_name_prefix: The name of the metric filter.
+
+ :type next_token: string
+ :param next_token: A string token used for pagination that points to
+ the next page of results. It must be a value obtained from the
+ response of the previous `DescribeMetricFilters` request.
+
+ :type limit: integer
+ :param limit: The maximum number of items returned in the response. If
+ you don't specify a value, the request would return up to 50 items.
+
+ """
+ params = {'logGroupName': log_group_name, }
+ if filter_name_prefix is not None:
+ params['filterNamePrefix'] = filter_name_prefix
+ if next_token is not None:
+ params['nextToken'] = next_token
+ if limit is not None:
+ params['limit'] = limit
+ return self.make_request(action='DescribeMetricFilters',
+ body=json.dumps(params))
+
+ def get_log_events(self, log_group_name, log_stream_name,
+ start_time=None, end_time=None, next_token=None,
+ limit=None, start_from_head=None):
+ """
+ Retrieves log events from the specified log stream. You can
+ provide an optional time range to filter the results on the
+ event `timestamp`.
+
+ By default, this operation returns as much log events as can
+ fit in a response size of 1MB, up to 10,000 log events. The
+ response will always include a `nextForwardToken` and a
+ `nextBackwardToken` in the response body. You can use any of
+ these tokens in subsequent `GetLogEvents` requests to paginate
+ through events in either forward or backward direction. You
+ can also limit the number of log events returned in the
+ response by specifying the `limit` parameter in the request.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type log_stream_name: string
+ :param log_stream_name:
+
+ :type start_time: long
+ :param start_time: A point in time expressed as the number of milliseconds
+ since Jan 1, 1970 00:00:00 UTC.
+
+ :type end_time: long
+ :param end_time: A point in time expressed as the number of milliseconds
+ since Jan 1, 1970 00:00:00 UTC.
+
+ :type next_token: string
+ :param next_token: A string token used for pagination that points to
+ the next page of results. It must be a value obtained from the
+ `nextForwardToken` or `nextBackwardToken` fields in the response of
+ the previous `GetLogEvents` request.
+
+ :type limit: integer
+ :param limit: The maximum number of log events returned in the
+ response. If you don't specify a value, the request would return as
+ much log events as can fit in a response size of 1MB, up to 10,000
+ log events.
+
+ :type start_from_head: boolean
+ :param start_from_head:
+
+ """
+ params = {
+ 'logGroupName': log_group_name,
+ 'logStreamName': log_stream_name,
+ }
+ if start_time is not None:
+ params['startTime'] = start_time
+ if end_time is not None:
+ params['endTime'] = end_time
+ if next_token is not None:
+ params['nextToken'] = next_token
+ if limit is not None:
+ params['limit'] = limit
+ if start_from_head is not None:
+ params['startFromHead'] = start_from_head
+ return self.make_request(action='GetLogEvents',
+ body=json.dumps(params))
+
+ def put_log_events(self, log_group_name, log_stream_name, log_events,
+ sequence_token=None):
+ """
+ Uploads a batch of log events to the specified log stream.
+
+ Every PutLogEvents request must include the `sequenceToken`
+ obtained from the response of the previous request. An upload
+ in a newly created log stream does not require a
+ `sequenceToken`.
+
+ The batch of events must satisfy the following constraints:
+
+ + The maximum batch size is 32,768 bytes, and this size is
+ calculated as the sum of all event messages in UTF-8, plus 26
+ bytes for each log event.
+ + None of the log events in the batch can be more than 2 hours
+ in the future.
+ + None of the log events in the batch can be older than 14
+ days or the retention period of the log group.
+ + The log events in the batch must be in chronological ordered
+ by their `timestamp`.
+ + The maximum number of log events in a batch is 1,000.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type log_stream_name: string
+ :param log_stream_name:
+
+ :type log_events: list
+ :param log_events: A list of events belonging to a log stream.
+
+ :type sequence_token: string
+ :param sequence_token: A string token that must be obtained from the
+ response of the previous `PutLogEvents` request.
+
+ """
+ params = {
+ 'logGroupName': log_group_name,
+ 'logStreamName': log_stream_name,
+ 'logEvents': log_events,
+ }
+ if sequence_token is not None:
+ params['sequenceToken'] = sequence_token
+ return self.make_request(action='PutLogEvents',
+ body=json.dumps(params))
+
+ def put_metric_filter(self, log_group_name, filter_name, filter_pattern,
+ metric_transformations):
+ """
+ Creates or updates a metric filter and associates it with the
+ specified log group. Metric filters allow you to configure
+ rules to extract metric data from log events ingested through
+ `PutLogEvents` requests.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type filter_name: string
+ :param filter_name: The name of the metric filter.
+
+ :type filter_pattern: string
+ :param filter_pattern:
+
+ :type metric_transformations: list
+ :param metric_transformations:
+
+ """
+ params = {
+ 'logGroupName': log_group_name,
+ 'filterName': filter_name,
+ 'filterPattern': filter_pattern,
+ 'metricTransformations': metric_transformations,
+ }
+ return self.make_request(action='PutMetricFilter',
+ body=json.dumps(params))
+
+ def put_retention_policy(self, log_group_name, retention_in_days):
+ """
+
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type retention_in_days: integer
+ :param retention_in_days: Specifies the number of days you want to
+ retain log events in the specified log group. Possible values are:
+ 1, 3, 5, 7, 14, 30, 60, 90, 120, 150, 180, 365, 400, 547, 730.
+
+ """
+ params = {
+ 'logGroupName': log_group_name,
+ 'retentionInDays': retention_in_days,
+ }
+ return self.make_request(action='PutRetentionPolicy',
+ body=json.dumps(params))
+
+ def set_retention(self, log_group_name, retention_in_days):
+ """
+ Sets the retention of the specified log group. Log groups are
+ created with a default retention of 14 days. The retention
+ attribute allow you to configure the number of days you want
+ to retain log events in the specified log group.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type retention_in_days: integer
+ :param retention_in_days: Specifies the number of days you want to
+ retain log events in the specified log group. Possible values are:
+ 1, 3, 5, 7, 14, 30, 60, 90, 120, 150, 180, 365, 400, 547, 730.
+
+ """
+ params = {
+ 'logGroupName': log_group_name,
+ 'retentionInDays': retention_in_days,
+ }
+ return self.make_request(action='SetRetention',
+ body=json.dumps(params))
+
+ def test_metric_filter(self, filter_pattern, log_event_messages):
+ """
+ Tests the filter pattern of a metric filter against a sample
+ of log event messages. You can use this operation to validate
+ the correctness of a metric filter pattern.
+
+ :type filter_pattern: string
+ :param filter_pattern:
+
+ :type log_event_messages: list
+ :param log_event_messages:
+
+ """
+ params = {
+ 'filterPattern': filter_pattern,
+ 'logEventMessages': log_event_messages,
+ }
+ return self.make_request(action='TestMetricFilter',
+ body=json.dumps(params))
+
+ def make_request(self, action, body):
+ headers = {
+ 'X-Amz-Target': '%s.%s' % (self.TargetPrefix, action),
+ 'Host': self.region.endpoint,
+ 'Content-Type': 'application/x-amz-json-1.1',
+ 'Content-Length': str(len(body)),
+ }
+ http_request = self.build_base_http_request(
+ method='POST', path='/', auth_path='/', params={},
+ headers=headers, data=body)
+ response = self._mexe(http_request, sender=None,
+ override_num_retries=10)
+ response_body = response.read().decode('utf-8')
+ boto.log.debug(response_body)
+ if response.status == 200:
+ if response_body:
+ return json.loads(response_body)
+ else:
+ json_body = json.loads(response_body)
+ fault_name = json_body.get('__type', None)
+ exception_class = self._faults.get(fault_name, self.ResponseError)
+ raise exception_class(response.status, response.reason,
+ body=json_body)
diff --git a/awx/lib/site-packages/boto/manage/cmdshell.py b/awx/lib/site-packages/boto/manage/cmdshell.py
index 0d726412ee..f53227763a 100644
--- a/awx/lib/site-packages/boto/manage/cmdshell.py
+++ b/awx/lib/site-packages/boto/manage/cmdshell.py
@@ -18,20 +18,36 @@
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-
+"""
+The cmdshell module uses the paramiko package to create SSH connections
+to the servers that are represented by instance objects. The module has
+functions for running commands, managing files, and opening interactive
+shell sessions over those connections.
+"""
from boto.mashups.interactive import interactive_shell
import boto
import os
import time
import shutil
-import StringIO
import paramiko
import socket
import subprocess
+from boto.compat import StringIO
class SSHClient(object):
-
+ """
+ This class creates a paramiko.SSHClient() object that represents
+ a session with an SSH server. You can use the SSHClient object to send
+ commands to the remote host and manipulate files on the remote host.
+
+ :ivar server: A Server object or FakeServer object.
+ :ivar host_key_file: The path to the user's .ssh key files.
+ :ivar uname: The username for the SSH connection. Default = 'root'.
+ :ivar timeout: The optional timeout variable for the TCP connection.
+ :ivar ssh_pwd: An optional password to use for authentication or for
+ unlocking the private key.
+ """
def __init__(self, server,
host_key_file='~/.ssh/known_hosts',
uname='root', timeout=None, ssh_pwd=None):
@@ -48,6 +64,12 @@ class SSHClient(object):
self.connect()
def connect(self, num_retries=5):
+ """
+ Connect to an SSH server and authenticate with it.
+
+ :type num_retries: int
+ :param num_retries: The maximum number of connection attempts.
+ """
retry = 0
while retry < num_retries:
try:
@@ -56,53 +78,132 @@ class SSHClient(object):
pkey=self._pkey,
timeout=self._timeout)
return
- except socket.error, (value, message):
+ except socket.error as xxx_todo_changeme:
+ (value, message) = xxx_todo_changeme.args
if value in (51, 61, 111):
- print 'SSH Connection refused, will retry in 5 seconds'
+ print('SSH Connection refused, will retry in 5 seconds')
time.sleep(5)
retry += 1
else:
raise
except paramiko.BadHostKeyException:
- print "%s has an entry in ~/.ssh/known_hosts and it doesn't match" % self.server.hostname
- print 'Edit that file to remove the entry and then hit return to try again'
+ print("%s has an entry in ~/.ssh/known_hosts and it doesn't match" % self.server.hostname)
+ print('Edit that file to remove the entry and then hit return to try again')
raw_input('Hit Enter when ready')
retry += 1
except EOFError:
- print 'Unexpected Error from SSH Connection, retry in 5 seconds'
+ print('Unexpected Error from SSH Connection, retry in 5 seconds')
time.sleep(5)
retry += 1
- print 'Could not establish SSH connection'
+ print('Could not establish SSH connection')
def open_sftp(self):
+ """
+ Open an SFTP session on the SSH server.
+
+ :rtype: :class:`paramiko.sftp_client.SFTPClient`
+ :return: An SFTP client object.
+ """
return self._ssh_client.open_sftp()
def get_file(self, src, dst):
+ """
+ Open an SFTP session on the remote host, and copy a file from
+ the remote host to the specified path on the local host.
+
+ :type src: string
+ :param src: The path to the target file on the remote host.
+
+ :type dst: string
+ :param dst: The path on your local host where you want to
+ store the file.
+ """
sftp_client = self.open_sftp()
sftp_client.get(src, dst)
def put_file(self, src, dst):
+ """
+ Open an SFTP session on the remote host, and copy a file from
+ the local host to the specified path on the remote host.
+
+ :type src: string
+ :param src: The path to the target file on your local host.
+
+ :type dst: string
+ :param dst: The path on the remote host where you want to store
+ the file.
+ """
sftp_client = self.open_sftp()
sftp_client.put(src, dst)
def open(self, filename, mode='r', bufsize=-1):
"""
- Open a file on the remote system and return a file-like object.
+ Open an SFTP session to the remote host, and open a file on
+ that host.
+
+ :type filename: string
+ :param filename: The path to the file on the remote host.
+
+ :type mode: string
+ :param mode: The file interaction mode.
+
+ :type bufsize: integer
+ :param bufsize: The file buffer size.
+
+ :rtype: :class:`paramiko.sftp_file.SFTPFile`
+ :return: A paramiko proxy object for a file on the remote server.
"""
sftp_client = self.open_sftp()
return sftp_client.open(filename, mode, bufsize)
def listdir(self, path):
+ """
+ List all of the files and subdirectories at the specified path
+ on the remote host.
+
+ :type path: string
+ :param path: The base path from which to obtain the list.
+
+ :rtype: list
+ :return: A list of files and subdirectories at the specified path.
+ """
sftp_client = self.open_sftp()
return sftp_client.listdir(path)
def isdir(self, path):
+ """
+ Check the specified path on the remote host to determine if
+ it is a directory.
+
+ :type path: string
+ :param path: The path to the directory that you want to check.
+
+ :rtype: integer
+ :return: If the path is a directory, the function returns 1.
+ If the path is a file or an invalid path, the function
+ returns 0.
+ """
status = self.run('[ -d %s ] || echo "FALSE"' % path)
if status[1].startswith('FALSE'):
return 0
return 1
def exists(self, path):
+ """
+ Check the remote host for the specified path, or a file
+ at the specified path. This function returns 1 if the
+ path or the file exist on the remote host, and returns 0 if
+ the path or the file does not exist on the remote host.
+
+ :type path: string
+ :param path: The path to the directory or file that you want to check.
+
+ :rtype: integer
+ :return: If the path or the file exist, the function returns 1.
+ If the path or the file do not exist on the remote host,
+ the function returns 0.
+ """
+
status = self.run('[ -a %s ] || echo "FALSE"' % path)
if status[1].startswith('FALSE'):
return 0
@@ -110,16 +211,22 @@ class SSHClient(object):
def shell(self):
"""
- Start an interactive shell session on the remote host.
+ Start an interactive shell session with the remote host.
"""
channel = self._ssh_client.invoke_shell()
interactive_shell(channel)
def run(self, command):
"""
- Execute a command on the remote host. Return a tuple containing
- an integer status and two strings, the first containing stdout
- and the second containing stderr from the command.
+ Run a command on the remote host.
+
+ :type command: string
+ :param command: The command that you want to send to the remote host.
+
+ :rtype: tuple
+ :return: This function returns a tuple that contains an integer status,
+ the stdout from the command, and the stderr from the command.
+
"""
boto.log.debug('running:%s on %s' % (command, self.server.instance_id))
status = 0
@@ -138,8 +245,14 @@ class SSHClient(object):
def run_pty(self, command):
"""
- Execute a command on the remote host with a pseudo-terminal.
- Returns a string containing the output of the command.
+ Request a pseudo-terminal from a server, and execute a command on that
+ server.
+
+ :type command: string
+ :param command: The command that you want to run on the remote host.
+
+ :rtype: :class:`paramiko.channel.Channel`
+ :return: An open channel object.
"""
boto.log.debug('running:%s on %s' % (command, self.server.instance_id))
channel = self._ssh_client.get_transport().open_session()
@@ -148,38 +261,77 @@ class SSHClient(object):
return channel
def close(self):
+ """
+ Close an SSH session and any open channels that are tied to it.
+ """
transport = self._ssh_client.get_transport()
transport.close()
self.server.reset_cmdshell()
class LocalClient(object):
-
+ """
+ :ivar server: A Server object or FakeServer object.
+ :ivar host_key_file: The path to the user's .ssh key files.
+ :ivar uname: The username for the SSH connection. Default = 'root'.
+ """
def __init__(self, server, host_key_file=None, uname='root'):
self.server = server
self.host_key_file = host_key_file
self.uname = uname
def get_file(self, src, dst):
+ """
+ Copy a file from one directory to another.
+ """
shutil.copyfile(src, dst)
def put_file(self, src, dst):
+ """
+ Copy a file from one directory to another.
+ """
shutil.copyfile(src, dst)
def listdir(self, path):
+ """
+ List all of the files and subdirectories at the specified path.
+
+ :rtype: list
+ :return: Return a list containing the names of the entries
+ in the directory given by path.
+ """
return os.listdir(path)
def isdir(self, path):
+ """
+ Check the specified path to determine if it is a directory.
+
+ :rtype: boolean
+ :return: Returns True if the path is an existing directory.
+ """
return os.path.isdir(path)
def exists(self, path):
+ """
+ Check for the specified path, or check a file at the specified path.
+
+ :rtype: boolean
+ :return: If the path or the file exist, the function returns True.
+ """
return os.path.exists(path)
def shell(self):
raise NotImplementedError('shell not supported with LocalClient')
def run(self):
+ """
+ Open a subprocess and run a command on the local host.
+
+ :rtype: tuple
+ :return: This function returns a tuple that contains an integer status
+ and a string with the combined stdout and stderr output.
+ """
boto.log.info('running:%s' % self.command)
- log_fp = StringIO.StringIO()
+ log_fp = StringIO()
process = subprocess.Popen(self.command, shell=True, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
while process.poll() is None:
@@ -196,9 +348,13 @@ class LocalClient(object):
class FakeServer(object):
"""
- A little class to fake out SSHClient (which is expecting a
- :class`boto.manage.server.Server` instance. This allows us
- to
+ This object has a subset of the variables that are normally in a
+ :class:`boto.manage.server.Server` object. You can use this FakeServer
+ object to create a :class:`boto.manage.SSHClient` object if you
+ don't have a real Server object.
+
+ :ivar instance: A boto Instance object.
+ :ivar ssh_key_file: The path to the SSH key file.
"""
def __init__(self, instance, ssh_key_file):
self.instance = instance
@@ -207,6 +363,14 @@ class FakeServer(object):
self.instance_id = self.instance.id
def start(server):
+ """
+ Connect to the specified server.
+
+ :return: If the server is local, the function returns a
+ :class:`boto.manage.cmdshell.LocalClient` object.
+ If the server is remote, the function returns a
+ :class:`boto.manage.cmdshell.SSHClient` object.
+ """
instance_id = boto.config.get('Instance', 'instance-id', None)
if instance_id == server.instance_id:
return LocalClient(server)
@@ -223,19 +387,19 @@ def sshclient_from_instance(instance, ssh_key_file,
:type instance: :class`boto.ec2.instance.Instance` object
:param instance: The instance object.
- :type ssh_key_file: str
- :param ssh_key_file: A path to the private key file used
- to log into instance.
+ :type ssh_key_file: string
+ :param ssh_key_file: A path to the private key file that is
+ used to log into the instance.
- :type host_key_file: str
+ :type host_key_file: string
:param host_key_file: A path to the known_hosts file used
by the SSH client.
Defaults to ~/.ssh/known_hosts
- :type user_name: str
+ :type user_name: string
:param user_name: The username to use when logging into
the instance. Defaults to root.
- :type ssh_pwd: str
+ :type ssh_pwd: string
:param ssh_pwd: The passphrase, if any, associated with
private key.
"""
diff --git a/awx/lib/site-packages/boto/manage/propget.py b/awx/lib/site-packages/boto/manage/propget.py
index 45b2ff2221..d034127d8b 100644
--- a/awx/lib/site-packages/boto/manage/propget.py
+++ b/awx/lib/site-packages/boto/manage/propget.py
@@ -19,7 +19,6 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-
def get(prop, choices=None):
prompt = prop.verbose_name
if not prompt:
@@ -38,7 +37,7 @@ def get(prop, choices=None):
value = choices[i-1]
if isinstance(value, tuple):
value = value[0]
- print '[%d] %s' % (i, value)
+ print('[%d] %s' % (i, value))
value = raw_input('%s [%d-%d]: ' % (prompt, min, max))
try:
int_value = int(value)
@@ -47,18 +46,18 @@ def get(prop, choices=None):
value = value[1]
valid = True
except ValueError:
- print '%s is not a valid choice' % value
+ print('%s is not a valid choice' % value)
except IndexError:
- print '%s is not within the range[%d-%d]' % (min, max)
+ print('%s is not within the range[%d-%d]' % (min, max))
else:
value = raw_input('%s: ' % prompt)
try:
value = prop.validate(value)
if prop.empty(value) and prop.required:
- print 'A value is required'
+ print('A value is required')
else:
valid = True
except:
- print 'Invalid value: %s' % value
+ print('Invalid value: %s' % value)
return value
diff --git a/awx/lib/site-packages/boto/manage/server.py b/awx/lib/site-packages/boto/manage/server.py
index 885db651e1..d9224ab8a8 100644
--- a/awx/lib/site-packages/boto/manage/server.py
+++ b/awx/lib/site-packages/boto/manage/server.py
@@ -19,11 +19,10 @@
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-
"""
High-level abstraction of an EC2 server
"""
-from __future__ import with_statement
+
import boto.ec2
from boto.mashups.iobject import IObject
from boto.pyami.config import BotoConfigPath, Config
@@ -32,9 +31,10 @@ from boto.sdb.db.property import StringProperty, IntegerProperty, BooleanPropert
from boto.manage import propget
from boto.ec2.zone import Zone
from boto.ec2.keypair import KeyPair
-import os, time, StringIO
+import os, time
from contextlib import closing
from boto.exception import EC2ResponseError
+from boto.compat import six, StringIO
InstanceTypes = ['m1.small', 'm1.large', 'm1.xlarge',
'c1.medium', 'c1.xlarge',
@@ -49,7 +49,7 @@ class Bundler(object):
self.ssh_client = SSHClient(server, uname=uname)
def copy_x509(self, key_file, cert_file):
- print '\tcopying cert and pk over to /mnt directory on server'
+ print('\tcopying cert and pk over to /mnt directory on server')
self.ssh_client.open_sftp()
path, name = os.path.split(key_file)
self.remote_key_file = '/mnt/%s' % name
@@ -57,7 +57,7 @@ class Bundler(object):
path, name = os.path.split(cert_file)
self.remote_cert_file = '/mnt/%s' % name
self.ssh_client.put_file(cert_file, self.remote_cert_file)
- print '...complete!'
+ print('...complete!')
def bundle_image(self, prefix, size, ssh_key):
command = ""
@@ -103,7 +103,7 @@ class Bundler(object):
ssh_key = self.server.get_ssh_key_file()
self.copy_x509(key_file, cert_file)
if not fp:
- fp = StringIO.StringIO()
+ fp = StringIO()
fp.write('sudo mv %s /mnt/boto.cfg; ' % BotoConfigPath)
fp.write('mv ~/.ssh/authorized_keys /mnt/authorized_keys; ')
if clear_history:
@@ -115,13 +115,13 @@ class Bundler(object):
fp.write('sudo mv /mnt/boto.cfg %s; ' % BotoConfigPath)
fp.write('mv /mnt/authorized_keys ~/.ssh/authorized_keys')
command = fp.getvalue()
- print 'running the following command on the remote server:'
- print command
+ print('running the following command on the remote server:')
+ print(command)
t = self.ssh_client.run(command)
- print '\t%s' % t[0]
- print '\t%s' % t[1]
- print '...complete!'
- print 'registering image...'
+ print('\t%s' % t[0])
+ print('\t%s' % t[1])
+ print('...complete!')
+ print('registering image...')
self.image_id = self.server.ec2.register_image(name=prefix, image_location='%s/%s.manifest.xml' % (bucket, prefix))
return self.image_id
@@ -250,7 +250,7 @@ class Server(Model):
instance_type = CalculatedProperty(verbose_name="Instance Type", calculated_type=str, use_method=True)
status = CalculatedProperty(verbose_name="Current Status", calculated_type=str, use_method=True)
launch_time = CalculatedProperty(verbose_name="Server Launch Time", calculated_type=str, use_method=True)
- console_output = CalculatedProperty(verbose_name="Console Output", calculated_type=file, use_method=True)
+ console_output = CalculatedProperty(verbose_name="Console Output", calculated_type=open, use_method=True)
packages = []
plugins = []
@@ -305,7 +305,7 @@ class Server(Model):
# deal with possibly passed in logical volume:
if logical_volume != None:
cfg.set('EBS', 'logical_volume_name', logical_volume.name)
- cfg_fp = StringIO.StringIO()
+ cfg_fp = StringIO()
cfg.write(cfg_fp)
# deal with the possibility that zone and/or keypair are strings read from the config file:
if isinstance(zone, Zone):
@@ -325,14 +325,14 @@ class Server(Model):
instances = reservation.instances
if elastic_ip is not None and instances.__len__() > 0:
instance = instances[0]
- print 'Waiting for instance to start so we can set its elastic IP address...'
+ print('Waiting for instance to start so we can set its elastic IP address...')
# Sometimes we get a message from ec2 that says that the instance does not exist.
# Hopefully the following delay will giv eec2 enough time to get to a stable state:
time.sleep(5)
while instance.update() != 'running':
time.sleep(1)
instance.use_ip(elastic_ip)
- print 'set the elastic IP of the first instance to %s' % elastic_ip
+ print('set the elastic IP of the first instance to %s' % elastic_ip)
for instance in instances:
s = cls()
s.ec2 = ec2
@@ -381,7 +381,7 @@ class Server(Model):
for reservation in rs:
for instance in reservation.instances:
try:
- Server.find(instance_id=instance.id).next()
+ next(Server.find(instance_id=instance.id))
boto.log.info('Server for %s already exists' % instance.id)
except StopIteration:
s = cls()
@@ -527,7 +527,7 @@ class Server(Model):
def get_cmdshell(self):
if not self._cmdshell:
- import cmdshell
+ from boto.manage import cmdshell
self.get_ssh_key_file()
self._cmdshell = cmdshell.start(self)
return self._cmdshell
diff --git a/awx/lib/site-packages/boto/manage/task.py b/awx/lib/site-packages/boto/manage/task.py
index 5d273c3193..c6663b9f0e 100644
--- a/awx/lib/site-packages/boto/manage/task.py
+++ b/awx/lib/site-packages/boto/manage/task.py
@@ -23,7 +23,8 @@
import boto
from boto.sdb.db.property import StringProperty, DateTimeProperty, IntegerProperty
from boto.sdb.db.model import Model
-import datetime, subprocess, StringIO, time
+import datetime, subprocess, time
+from boto.compat import StringIO
def check_hour(val):
if val == '*':
@@ -100,7 +101,7 @@ class Task(Model):
def _run(self, msg, vtimeout):
boto.log.info('Task[%s] - running:%s' % (self.name, self.command))
- log_fp = StringIO.StringIO()
+ log_fp = StringIO()
process = subprocess.Popen(self.command, shell=True, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
nsecs = 5
diff --git a/awx/lib/site-packages/boto/manage/test_manage.py b/awx/lib/site-packages/boto/manage/test_manage.py
index e0b032a9b8..a8c188c319 100644
--- a/awx/lib/site-packages/boto/manage/test_manage.py
+++ b/awx/lib/site-packages/boto/manage/test_manage.py
@@ -2,33 +2,33 @@ from boto.manage.server import Server
from boto.manage.volume import Volume
import time
-print '--> Creating New Volume'
+print('--> Creating New Volume')
volume = Volume.create()
-print volume
+print(volume)
-print '--> Creating New Server'
+print('--> Creating New Server')
server_list = Server.create()
server = server_list[0]
-print server
+print(server)
-print '----> Waiting for Server to start up'
+print('----> Waiting for Server to start up')
while server.status != 'running':
- print '*'
+ print('*')
time.sleep(10)
-print '----> Server is running'
+print('----> Server is running')
-print '--> Run "df -k" on Server'
+print('--> Run "df -k" on Server')
status = server.run('df -k')
-print status[1]
+print(status[1])
-print '--> Now run volume.make_ready to make the volume ready to use on server'
+print('--> Now run volume.make_ready to make the volume ready to use on server')
volume.make_ready(server)
-print '--> Run "df -k" on Server'
+print('--> Run "df -k" on Server')
status = server.run('df -k')
-print status[1]
+print(status[1])
-print '--> Do an "ls -al" on the new filesystem'
+print('--> Do an "ls -al" on the new filesystem')
status = server.run('ls -al %s' % volume.mount_point)
-print status[1]
+print(status[1])
diff --git a/awx/lib/site-packages/boto/manage/volume.py b/awx/lib/site-packages/boto/manage/volume.py
index fae9df464d..410414c7b3 100644
--- a/awx/lib/site-packages/boto/manage/volume.py
+++ b/awx/lib/site-packages/boto/manage/volume.py
@@ -18,8 +18,8 @@
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
+from __future__ import print_function
-from __future__ import with_statement
from boto.sdb.db.model import Model
from boto.sdb.db.property import StringProperty, IntegerProperty, ListProperty, ReferenceProperty, CalculatedProperty
from boto.manage.server import Server
@@ -199,7 +199,7 @@ class Volume(Model):
def attach(self, server=None):
if self.attachment_state == 'attached':
- print 'already attached'
+ print('already attached')
return None
if server:
self.server = server
@@ -210,7 +210,7 @@ class Volume(Model):
def detach(self, force=False):
state = self.attachment_state
if state == 'available' or state is None or state == 'detaching':
- print 'already detached'
+ print('already detached')
return None
ec2 = self.get_ec2_connection()
ec2.detach_volume(self.volume_id, self.server.instance_id, self.device, force)
@@ -353,9 +353,9 @@ class Volume(Model):
day=now.day, tzinfo=now.tzinfo)
# Keep the first snapshot from each day of the previous week
one_week = datetime.timedelta(days=7, seconds=60*60)
- print midnight-one_week, midnight
+ print(midnight-one_week, midnight)
previous_week = self.get_snapshot_range(snaps, midnight-one_week, midnight)
- print previous_week
+ print(previous_week)
if not previous_week:
return snaps
current_day = None
diff --git a/awx/lib/site-packages/boto/mashups/interactive.py b/awx/lib/site-packages/boto/mashups/interactive.py
index b80e661e5f..1eb9db47d5 100644
--- a/awx/lib/site-packages/boto/mashups/interactive.py
+++ b/awx/lib/site-packages/boto/mashups/interactive.py
@@ -15,7 +15,7 @@
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
-
+from __future__ import print_function
import socket
import sys
@@ -51,7 +51,7 @@ def posix_shell(chan):
try:
x = chan.recv(1024)
if len(x) == 0:
- print '\r\n*** EOF\r\n',
+ print('\r\n*** EOF\r\n', end=' ')
break
sys.stdout.write(x)
sys.stdout.flush()
diff --git a/awx/lib/site-packages/boto/mashups/iobject.py b/awx/lib/site-packages/boto/mashups/iobject.py
index de74287bc5..f6ae98a34a 100644
--- a/awx/lib/site-packages/boto/mashups/iobject.py
+++ b/awx/lib/site-packages/boto/mashups/iobject.py
@@ -18,7 +18,6 @@
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-
import os
def int_val_fn(v):
@@ -33,7 +32,7 @@ class IObject(object):
def choose_from_list(self, item_list, search_str='',
prompt='Enter Selection'):
if not item_list:
- print 'No Choices Available'
+ print('No Choices Available')
return
choice = None
while not choice:
@@ -41,19 +40,19 @@ class IObject(object):
choices = []
for item in item_list:
if isinstance(item, basestring):
- print '[%d] %s' % (n, item)
+ print('[%d] %s' % (n, item))
choices.append(item)
n += 1
else:
obj, id, desc = item
if desc:
if desc.find(search_str) >= 0:
- print '[%d] %s - %s' % (n, id, desc)
+ print('[%d] %s - %s' % (n, id, desc))
choices.append(obj)
n += 1
else:
if id.find(search_str) >= 0:
- print '[%d] %s' % (n, id)
+ print('[%d] %s' % (n, id))
choices.append(obj)
n += 1
if choices:
@@ -67,12 +66,12 @@ class IObject(object):
return None
choice = choices[int_val-1]
except ValueError:
- print '%s is not a valid choice' % val
+ print('%s is not a valid choice' % val)
except IndexError:
- print '%s is not within the range[1-%d]' % (val,
- len(choices))
+ print('%s is not within the range[1-%d]' % (val,
+ len(choices)))
else:
- print "No objects matched your pattern"
+ print("No objects matched your pattern")
search_str = ''
return choice
@@ -83,7 +82,7 @@ class IObject(object):
if validation_fn:
okay = validation_fn(val)
if not okay:
- print 'Invalid value: %s' % val
+ print('Invalid value: %s' % val)
else:
okay = True
return val
@@ -105,7 +104,7 @@ class IObject(object):
else:
val = ''
else:
- print 'Invalid value: %s' % val
+ print('Invalid value: %s' % val)
val = ''
return val
diff --git a/awx/lib/site-packages/boto/mashups/order.py b/awx/lib/site-packages/boto/mashups/order.py
index c4deebfff5..4aaec307bd 100644
--- a/awx/lib/site-packages/boto/mashups/order.py
+++ b/awx/lib/site-packages/boto/mashups/order.py
@@ -18,7 +18,6 @@
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-
"""
High-level abstraction of an EC2 order for servers
"""
@@ -29,7 +28,8 @@ from boto.mashups.server import Server, ServerSet
from boto.mashups.iobject import IObject
from boto.pyami.config import Config
from boto.sdb.persist import get_domain, set_domain
-import time, StringIO
+import time
+from boto.compat import StringIO
InstanceTypes = ['m1.small', 'm1.large', 'm1.xlarge', 'c1.medium', 'c1.xlarge']
@@ -124,7 +124,7 @@ class Item(IObject):
self.config = Config(path=config_path)
def get_userdata_string(self):
- s = StringIO.StringIO()
+ s = StringIO()
self.config.write(s)
return s.getvalue()
@@ -171,16 +171,16 @@ class Order(IObject):
self.items.append(item)
def display(self):
- print 'This Order consists of the following items'
- print
- print 'QTY\tNAME\tTYPE\nAMI\t\tGroups\t\t\tKeyPair'
+ print('This Order consists of the following items')
+ print()
+ print('QTY\tNAME\tTYPE\nAMI\t\tGroups\t\t\tKeyPair')
for item in self.items:
- print '%s\t%s\t%s\t%s\t%s\t%s' % (item.quantity, item.name, item.instance_type,
- item.ami.id, item.groups, item.key.name)
+ print('%s\t%s\t%s\t%s\t%s\t%s' % (item.quantity, item.name, item.instance_type,
+ item.ami.id, item.groups, item.key.name))
def place(self, block=True):
if get_domain() is None:
- print 'SDB Persistence Domain not set'
+ print('SDB Persistence Domain not set')
domain_name = self.get_string('Specify SDB Domain')
set_domain(domain_name)
s = ServerSet()
@@ -192,7 +192,7 @@ class Order(IObject):
if block:
states = [i.state for i in r.instances]
if states.count('running') != len(states):
- print states
+ print(states)
time.sleep(15)
states = [i.update() for i in r.instances]
for i in r.instances:
diff --git a/awx/lib/site-packages/boto/mashups/server.py b/awx/lib/site-packages/boto/mashups/server.py
index cb6d78a718..7045e7f4da 100644
--- a/awx/lib/site-packages/boto/mashups/server.py
+++ b/awx/lib/site-packages/boto/mashups/server.py
@@ -18,19 +18,19 @@
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-
"""
High-level abstraction of an EC2 server
"""
+
import boto
import boto.utils
+from boto.compat import StringIO
from boto.mashups.iobject import IObject
from boto.pyami.config import Config, BotoConfigPath
from boto.mashups.interactive import interactive_shell
from boto.sdb.db.model import Model
from boto.sdb.db.property import StringProperty
import os
-import StringIO
class ServerSet(list):
@@ -228,7 +228,7 @@ class Server(Model):
self._config.set('Pyami', 'server_sdb_domain', self._manager.domain.name)
self._config.set("Pyami", 'server_sdb_name', self.name)
- cfg = StringIO.StringIO()
+ cfg = StringIO()
self._config.write(cfg)
cfg = cfg.getvalue()
r = ami.run(min_count=1,
@@ -252,7 +252,7 @@ class Server(Model):
uname='root'):
import paramiko
if not self.instance:
- print 'No instance yet!'
+ print('No instance yet!')
return
if not self._ssh_client:
if not key_file:
@@ -288,8 +288,8 @@ class Server(Model):
interactive_shell(channel)
def bundle_image(self, prefix, key_file, cert_file, size):
- print 'bundling image...'
- print '\tcopying cert and pk over to /mnt directory on server'
+ print('bundling image...')
+ print('\tcopying cert and pk over to /mnt directory on server')
ssh_client = self.get_ssh_client()
sftp_client = ssh_client.open_sftp()
path, name = os.path.split(key_file)
@@ -298,7 +298,7 @@ class Server(Model):
path, name = os.path.split(cert_file)
remote_cert_file = '/mnt/%s' % name
self.put_file(cert_file, remote_cert_file)
- print '\tdeleting %s' % BotoConfigPath
+ print('\tdeleting %s' % BotoConfigPath)
# delete the metadata.ini file if it exists
try:
sftp_client.remove(BotoConfigPath)
@@ -314,27 +314,27 @@ class Server(Model):
command += '-r i386'
else:
command += '-r x86_64'
- print '\t%s' % command
+ print('\t%s' % command)
t = ssh_client.exec_command(command)
response = t[1].read()
- print '\t%s' % response
- print '\t%s' % t[2].read()
- print '...complete!'
+ print('\t%s' % response)
+ print('\t%s' % t[2].read())
+ print('...complete!')
def upload_bundle(self, bucket, prefix):
- print 'uploading bundle...'
+ print('uploading bundle...')
command = 'ec2-upload-bundle '
command += '-m /mnt/%s.manifest.xml ' % prefix
command += '-b %s ' % bucket
command += '-a %s ' % self.ec2.aws_access_key_id
command += '-s %s ' % self.ec2.aws_secret_access_key
- print '\t%s' % command
+ print('\t%s' % command)
ssh_client = self.get_ssh_client()
t = ssh_client.exec_command(command)
response = t[1].read()
- print '\t%s' % response
- print '\t%s' % t[2].read()
- print '...complete!'
+ print('\t%s' % response)
+ print('\t%s' % t[2].read())
+ print('...complete!')
def create_image(self, bucket=None, prefix=None, key_file=None, cert_file=None, size=None):
iobject = IObject()
@@ -350,7 +350,7 @@ class Server(Model):
size = iobject.get_int('Size (in MB) of bundled image')
self.bundle_image(prefix, key_file, cert_file, size)
self.upload_bundle(bucket, prefix)
- print 'registering image...'
+ print('registering image...')
self.image_id = self.ec2.register_image('%s/%s.manifest.xml' % (bucket, prefix))
return self.image_id
@@ -384,12 +384,12 @@ class Server(Model):
return self.ec2.detach_volume(volume_id=volume_id, instance_id=self.instance_id)
def install_package(self, package_name):
- print 'installing %s...' % package_name
+ print('installing %s...' % package_name)
command = 'yum -y install %s' % package_name
- print '\t%s' % command
+ print('\t%s' % command)
ssh_client = self.get_ssh_client()
t = ssh_client.exec_command(command)
response = t[1].read()
- print '\t%s' % response
- print '\t%s' % t[2].read()
- print '...complete!'
+ print('\t%s' % response)
+ print('\t%s' % t[2].read())
+ print('...complete!')
diff --git a/awx/lib/site-packages/boto/mturk/connection.py b/awx/lib/site-packages/boto/mturk/connection.py
index ff011ff652..14b8ced0fd 100644
--- a/awx/lib/site-packages/boto/mturk/connection.py
+++ b/awx/lib/site-packages/boto/mturk/connection.py
@@ -18,7 +18,6 @@
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-
import xml.sax
import datetime
import itertools
@@ -307,7 +306,7 @@ class MTurkConnection(AWSQueryConnection):
records, return the page numbers to be retrieved.
"""
pages = total_records / page_size + bool(total_records % page_size)
- return range(1, pages + 1)
+ return list(range(1, pages + 1))
def get_all_hits(self):
"""
@@ -388,15 +387,15 @@ class MTurkConnection(AWSQueryConnection):
The number of assignments on the page in the filtered results
list, equivalent to the number of assignments being returned
by this call.
- A non-negative integer
+ A non-negative integer, as a string.
PageNumber
The number of the page in the filtered results list being
returned.
- A positive integer
+ A positive integer, as a string.
TotalNumResults
The total number of HITs in the filtered results list based
on this call.
- A non-negative integer
+ A non-negative integer, as a string.
The ResultSet will contain zero or more Assignment objects
@@ -829,7 +828,7 @@ class MTurkConnection(AWSQueryConnection):
"""
body = response.read()
if self.debug == 2:
- print body
+ print(body)
if '' not in body:
rs = ResultSet(marker_elems)
h = handler.XmlHandler(rs, self)
diff --git a/awx/lib/site-packages/boto/mws/connection.py b/awx/lib/site-packages/boto/mws/connection.py
index 7c068b52de..01b0b30b3b 100644
--- a/awx/lib/site-packages/boto/mws/connection.py
+++ b/awx/lib/site-packages/boto/mws/connection.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2012 Andy Davidoff http://www.disruptek.com/
+# Copyright (c) 2012-2014 Andy Davidoff http://www.disruptek.com/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
@@ -20,30 +20,41 @@
# IN THE SOFTWARE.
import xml.sax
import hashlib
-import base64
import string
+import collections
from boto.connection import AWSQueryConnection
-from boto.mws.exception import ResponseErrorFactory
-from boto.mws.response import ResponseFactory, ResponseElement
-from boto.handler import XmlHandler
+from boto.exception import BotoServerError
+import boto.mws.exception
import boto.mws.response
+from boto.handler import XmlHandler
+from boto.compat import filter, map, six, encodebytes
__all__ = ['MWSConnection']
api_version_path = {
- 'Feeds': ('2009-01-01', 'Merchant', '/'),
- 'Reports': ('2009-01-01', 'Merchant', '/'),
- 'Orders': ('2011-01-01', 'SellerId', '/Orders/2011-01-01'),
- 'Products': ('2011-10-01', 'SellerId', '/Products/2011-10-01'),
- 'Sellers': ('2011-07-01', 'SellerId', '/Sellers/2011-07-01'),
- 'Inbound': ('2010-10-01', 'SellerId',
- '/FulfillmentInboundShipment/2010-10-01'),
- 'Outbound': ('2010-10-01', 'SellerId',
- '/FulfillmentOutboundShipment/2010-10-01'),
- 'Inventory': ('2010-10-01', 'SellerId',
- '/FulfillmentInventory/2010-10-01'),
+ 'Feeds': ('2009-01-01', 'Merchant', '/'),
+ 'Reports': ('2009-01-01', 'Merchant', '/'),
+ 'Orders': ('2013-09-01', 'SellerId', '/Orders/2013-09-01'),
+ 'Products': ('2011-10-01', 'SellerId', '/Products/2011-10-01'),
+ 'Sellers': ('2011-07-01', 'SellerId', '/Sellers/2011-07-01'),
+ 'Inbound': ('2010-10-01', 'SellerId',
+ '/FulfillmentInboundShipment/2010-10-01'),
+ 'Outbound': ('2010-10-01', 'SellerId',
+ '/FulfillmentOutboundShipment/2010-10-01'),
+ 'Inventory': ('2010-10-01', 'SellerId',
+ '/FulfillmentInventory/2010-10-01'),
+ 'Recommendations': ('2013-04-01', 'SellerId',
+ '/Recommendations/2013-04-01'),
+ 'CustomerInfo': ('2014-03-01', 'SellerId',
+ '/CustomerInformation/2014-03-01'),
+ 'CartInfo': ('2014-03-01', 'SellerId',
+ '/CartInformation/2014-03-01'),
+ 'Subscriptions': ('2013-07-01', 'SellerId',
+ '/Subscriptions/2013-07-01'),
+ 'OffAmazonPayments': ('2013-01-01', 'SellerId',
+ '/OffAmazonPayments/2013-01-01'),
}
-content_md5 = lambda c: base64.encodestring(hashlib.md5(c).digest()).strip()
+content_md5 = lambda c: encodebytes(hashlib.md5(c).digest()).strip()
decorated_attrs = ('action', 'response', 'section',
'quota', 'restore', 'version')
api_call_map = {}
@@ -52,6 +63,7 @@ api_call_map = {}
def add_attrs_from(func, to):
for attr in decorated_attrs:
setattr(to, attr, getattr(func, attr, None))
+ to.__wrapped__ = func
return to
@@ -94,34 +106,40 @@ def http_body(field):
return decorator
-def destructure_object(value, into, prefix=''):
- if isinstance(value, ResponseElement):
- destructure_object(value.__dict__, into, prefix=prefix)
- elif isinstance(value, dict):
- for name, attr in value.iteritems():
+def destructure_object(value, into, prefix, members=False):
+ if isinstance(value, boto.mws.response.ResponseElement):
+ destructure_object(value.__dict__, into, prefix, members=members)
+ elif isinstance(value, collections.Mapping):
+ for name in value:
if name.startswith('_'):
continue
- destructure_object(attr, into, prefix=prefix + '.' + name)
- elif any([isinstance(value, typ) for typ in (list, set, tuple)]):
+ destructure_object(value[name], into, prefix + '.' + name,
+ members=members)
+ elif isinstance(value, six.string_types):
+ into[prefix] = value
+ elif isinstance(value, collections.Iterable):
for index, element in enumerate(value):
- newprefix = prefix + '.' + str(index + 1)
- destructure_object(element, into, prefix=newprefix)
+ suffix = (members and '.member.' or '.') + str(index + 1)
+ destructure_object(element, into, prefix + suffix,
+ members=members)
elif isinstance(value, bool):
into[prefix] = str(value).lower()
else:
into[prefix] = value
-def structured_objects(*fields):
+def structured_objects(*fields, **kwargs):
def decorator(func):
def wrapper(*args, **kw):
- for field in filter(kw.has_key, fields):
- destructure_object(kw.pop(field), kw, prefix=field)
+ members = kwargs.get('members', False)
+ for field in filter(lambda i: i in kw, fields):
+ destructure_object(kw.pop(field), kw, field, members=members)
return func(*args, **kw)
- wrapper.__doc__ = "{0}\nObjects|dicts: {1}".format(func.__doc__,
- ', '.join(fields))
+ wrapper.__doc__ = "{0}\nElement|Iter|Map: {1}\n" \
+ "(ResponseElement or anything iterable/dict-like)" \
+ .format(func.__doc__, ', '.join(fields))
return add_attrs_from(func, to=wrapper)
return decorator
@@ -130,18 +148,18 @@ def requires(*groups):
def decorator(func):
- def wrapper(*args, **kw):
- hasgroup = lambda x: len(x) == len(filter(kw.has_key, x))
- if 1 != len(filter(hasgroup, groups)):
+ def requires(*args, **kw):
+ hasgroup = lambda group: all(key in kw for key in group)
+ if 1 != len(list(filter(hasgroup, groups))):
message = ' OR '.join(['+'.join(g) for g in groups])
message = "{0} requires {1} argument(s)" \
"".format(func.action, message)
raise KeyError(message)
return func(*args, **kw)
message = ' OR '.join(['+'.join(g) for g in groups])
- wrapper.__doc__ = "{0}\nRequired: {1}".format(func.__doc__,
- message)
- return add_attrs_from(func, to=wrapper)
+ requires.__doc__ = "{0}\nRequired: {1}".format(func.__doc__,
+ message)
+ return add_attrs_from(func, to=requires)
return decorator
@@ -150,8 +168,8 @@ def exclusive(*groups):
def decorator(func):
def wrapper(*args, **kw):
- hasgroup = lambda x: len(x) == len(filter(kw.has_key, x))
- if len(filter(hasgroup, groups)) not in (0, 1):
+ hasgroup = lambda group: all(key in kw for key in group)
+ if len(list(filter(hasgroup, groups))) not in (0, 1):
message = ' OR '.join(['+'.join(g) for g in groups])
message = "{0} requires either {1}" \
"".format(func.action, message)
@@ -169,8 +187,8 @@ def dependent(field, *groups):
def decorator(func):
def wrapper(*args, **kw):
- hasgroup = lambda x: len(x) == len(filter(kw.has_key, x))
- if field in kw and 1 > len(filter(hasgroup, groups)):
+ hasgroup = lambda group: all(key in kw for key in group)
+ if field in kw and not any(hasgroup(g) for g in groups):
message = ' OR '.join(['+'.join(g) for g in groups])
message = "{0} argument {1} requires {2}" \
"".format(func.action, field, message)
@@ -188,15 +206,15 @@ def requires_some_of(*fields):
def decorator(func):
- def wrapper(*args, **kw):
- if not filter(kw.has_key, fields):
+ def requires(*args, **kw):
+ if not any(i in kw for i in fields):
message = "{0} requires at least one of {1} argument(s)" \
"".format(func.action, ', '.join(fields))
raise KeyError(message)
return func(*args, **kw)
- wrapper.__doc__ = "{0}\nSome Required: {1}".format(func.__doc__,
- ', '.join(fields))
- return add_attrs_from(func, to=wrapper)
+ requires.__doc__ = "{0}\nSome Required: {1}".format(func.__doc__,
+ ', '.join(fields))
+ return add_attrs_from(func, to=requires)
return decorator
@@ -218,12 +236,7 @@ def api_action(section, quota, restore, *api):
def decorator(func, quota=int(quota), restore=float(restore)):
version, accesskey, path = api_version_path[section]
- action = ''.join(api or map(str.capitalize, func.func_name.split('_')))
- if hasattr(boto.mws.response, action + 'Response'):
- response = getattr(boto.mws.response, action + 'Response')
- else:
- response = ResponseFactory(action)
- response._action = action
+ action = ''.join(api or map(str.capitalize, func.__name__.split('_')))
def wrapper(self, *args, **kw):
kw.setdefault(accesskey, getattr(self, accesskey, None))
@@ -234,61 +247,92 @@ def api_action(section, quota, restore, *api):
raise KeyError(message)
kw['Action'] = action
kw['Version'] = version
- return func(self, path, response, *args, **kw)
+ response = self._response_factory(action, connection=self)
+ request = dict(path=path, quota=quota, restore=restore)
+ return func(self, request, response, *args, **kw)
for attr in decorated_attrs:
setattr(wrapper, attr, locals().get(attr))
wrapper.__doc__ = "MWS {0}/{1} API call; quota={2} restore={3:.2f}\n" \
"{4}".format(action, version, quota, restore,
func.__doc__)
- api_call_map[action] = func.func_name
+ api_call_map[action] = func.__name__
return wrapper
return decorator
class MWSConnection(AWSQueryConnection):
- ResponseError = ResponseErrorFactory
+ ResponseFactory = boto.mws.response.ResponseFactory
+ ResponseErrorFactory = boto.mws.exception.ResponseErrorFactory
def __init__(self, *args, **kw):
kw.setdefault('host', 'mws.amazonservices.com')
+ self._sandboxed = kw.pop('sandbox', False)
self.Merchant = kw.pop('Merchant', None) or kw.get('SellerId')
self.SellerId = kw.pop('SellerId', None) or self.Merchant
+ kw = self._setup_factories(kw.pop('factory_scopes', []), **kw)
super(MWSConnection, self).__init__(*args, **kw)
+ def _setup_factories(self, extrascopes, **kw):
+ for factory, (scope, Default) in {
+ 'response_factory':
+ (boto.mws.response, self.ResponseFactory),
+ 'response_error_factory':
+ (boto.mws.exception, self.ResponseErrorFactory),
+ }.items():
+ if factory in kw:
+ setattr(self, '_' + factory, kw.pop(factory))
+ else:
+ scopes = extrascopes + [scope]
+ setattr(self, '_' + factory, Default(scopes=scopes))
+ return kw
+
+ def _sandboxify(self, path):
+ if not self._sandboxed:
+ return path
+ splat = path.split('/')
+ splat[-2] += '_Sandbox'
+        return '/'.join(splat)
+
def _required_auth_capability(self):
return ['mws']
- def post_request(self, path, params, cls, body='', headers=None,
- isXML=True):
+ def _post_request(self, request, params, parser, body='', headers=None):
"""Make a POST request, optionally with a content body,
and return the response, optionally as raw text.
- Modelled off of the inherited get_object/make_request flow.
"""
headers = headers or {}
+ path = self._sandboxify(request['path'])
request = self.build_base_http_request('POST', path, None, data=body,
params=params, headers=headers,
host=self.host)
- response = self._mexe(request, override_num_retries=None)
+ try:
+ response = self._mexe(request, override_num_retries=None)
+ except BotoServerError as bs:
+            raise self._response_error_factory(bs.status, bs.reason, bs.body)
body = response.read()
boto.log.debug(body)
if not body:
boto.log.error('Null body %s' % body)
- raise self.ResponseError(response.status, response.reason, body)
+ raise self._response_error_factory(response.status,
+ response.reason, body)
if response.status != 200:
boto.log.error('%s %s' % (response.status, response.reason))
boto.log.error('%s' % body)
- raise self.ResponseError(response.status, response.reason, body)
- if not isXML:
- digest = response.getheader('Content-MD5')
+ raise self._response_error_factory(response.status,
+ response.reason, body)
+ digest = response.getheader('Content-MD5')
+ if digest is not None:
assert content_md5(body) == digest
- return body
- return self._parse_response(cls, body)
+ contenttype = response.getheader('Content-Type')
+ return self._parse_response(parser, contenttype, body)
- def _parse_response(self, cls, body):
- obj = cls(self)
- h = XmlHandler(obj, self)
- xml.sax.parseString(body, h)
- return obj
+ def _parse_response(self, parser, contenttype, body):
+ if not contenttype.startswith('text/xml'):
+ return body
+ handler = XmlHandler(parser, self)
+ xml.sax.parseString(body, handler)
+ return parser
def method_for(self, name):
"""Return the MWS API method referred to in the argument.
@@ -320,55 +364,55 @@ class MWSConnection(AWSQueryConnection):
response = more(NextToken=response._result.NextToken)
yield response
+ @requires(['FeedType'])
@boolean_arguments('PurgeAndReplace')
@http_body('FeedContent')
@structured_lists('MarketplaceIdList.Id')
- @requires(['FeedType'])
@api_action('Feeds', 15, 120)
- def submit_feed(self, path, response, headers=None, body='', **kw):
+ def submit_feed(self, request, response, headers=None, body='', **kw):
"""Uploads a feed for processing by Amazon MWS.
"""
headers = headers or {}
- return self.post_request(path, kw, response, body=body,
- headers=headers)
+ return self._post_request(request, kw, response, body=body,
+ headers=headers)
@structured_lists('FeedSubmissionIdList.Id', 'FeedTypeList.Type',
'FeedProcessingStatusList.Status')
@api_action('Feeds', 10, 45)
- def get_feed_submission_list(self, path, response, **kw):
+ def get_feed_submission_list(self, request, response, **kw):
"""Returns a list of all feed submissions submitted in the
previous 90 days.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['NextToken'])
@api_action('Feeds', 0, 0)
- def get_feed_submission_list_by_next_token(self, path, response, **kw):
+ def get_feed_submission_list_by_next_token(self, request, response, **kw):
"""Returns a list of feed submissions using the NextToken parameter.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@structured_lists('FeedTypeList.Type', 'FeedProcessingStatusList.Status')
@api_action('Feeds', 10, 45)
- def get_feed_submission_count(self, path, response, **kw):
+ def get_feed_submission_count(self, request, response, **kw):
"""Returns a count of the feeds submitted in the previous 90 days.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@structured_lists('FeedSubmissionIdList.Id', 'FeedTypeList.Type')
@api_action('Feeds', 10, 45)
- def cancel_feed_submissions(self, path, response, **kw):
+ def cancel_feed_submissions(self, request, response, **kw):
"""Cancels one or more feed submissions and returns a
count of the feed submissions that were canceled.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['FeedSubmissionId'])
@api_action('Feeds', 15, 60)
- def get_feed_submission_result(self, path, response, **kw):
+ def get_feed_submission_result(self, request, response, **kw):
"""Returns the feed processing report.
"""
- return self.post_request(path, kw, response, isXML=False)
+ return self._post_request(request, kw, response)
def get_service_status(self, **kw):
"""Instruct the user on how to get service status.
@@ -379,298 +423,298 @@ class MWSConnection(AWSQueryConnection):
"{1}".format(self.__class__.__name__, sections)
raise AttributeError(message)
+ @requires(['ReportType'])
@structured_lists('MarketplaceIdList.Id')
@boolean_arguments('ReportOptions=ShowSalesChannel')
- @requires(['ReportType'])
@api_action('Reports', 15, 60)
- def request_report(self, path, response, **kw):
+ def request_report(self, request, response, **kw):
"""Creates a report request and submits the request to Amazon MWS.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@structured_lists('ReportRequestIdList.Id', 'ReportTypeList.Type',
'ReportProcessingStatusList.Status')
@api_action('Reports', 10, 45)
- def get_report_request_list(self, path, response, **kw):
+ def get_report_request_list(self, request, response, **kw):
"""Returns a list of report requests that you can use to get the
ReportRequestId for a report.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['NextToken'])
@api_action('Reports', 0, 0)
- def get_report_request_list_by_next_token(self, path, response, **kw):
+ def get_report_request_list_by_next_token(self, request, response, **kw):
"""Returns a list of report requests using the NextToken,
which was supplied by a previous request to either
GetReportRequestListByNextToken or GetReportRequestList, where
the value of HasNext was true in that previous request.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@structured_lists('ReportTypeList.Type',
'ReportProcessingStatusList.Status')
@api_action('Reports', 10, 45)
- def get_report_request_count(self, path, response, **kw):
+ def get_report_request_count(self, request, response, **kw):
"""Returns a count of report requests that have been submitted
to Amazon MWS for processing.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@api_action('Reports', 10, 45)
- def cancel_report_requests(self, path, response, **kw):
+ def cancel_report_requests(self, request, response, **kw):
"""Cancel one or more report requests, returning the count of the
canceled report requests and the report request information.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@boolean_arguments('Acknowledged')
@structured_lists('ReportRequestIdList.Id', 'ReportTypeList.Type')
@api_action('Reports', 10, 60)
- def get_report_list(self, path, response, **kw):
+ def get_report_list(self, request, response, **kw):
"""Returns a list of reports that were created in the previous
90 days that match the query parameters.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['NextToken'])
@api_action('Reports', 0, 0)
- def get_report_list_by_next_token(self, path, response, **kw):
+ def get_report_list_by_next_token(self, request, response, **kw):
"""Returns a list of reports using the NextToken, which
was supplied by a previous request to either
GetReportListByNextToken or GetReportList, where the
value of HasNext was true in the previous call.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@boolean_arguments('Acknowledged')
@structured_lists('ReportTypeList.Type')
@api_action('Reports', 10, 45)
- def get_report_count(self, path, response, **kw):
+ def get_report_count(self, request, response, **kw):
"""Returns a count of the reports, created in the previous 90 days,
with a status of _DONE_ and that are available for download.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['ReportId'])
@api_action('Reports', 15, 60)
- def get_report(self, path, response, **kw):
+ def get_report(self, request, response, **kw):
"""Returns the contents of a report.
"""
- return self.post_request(path, kw, response, isXML=False)
+ return self._post_request(request, kw, response)
@requires(['ReportType', 'Schedule'])
@api_action('Reports', 10, 45)
- def manage_report_schedule(self, path, response, **kw):
+ def manage_report_schedule(self, request, response, **kw):
"""Creates, updates, or deletes a report request schedule for
a specified report type.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@structured_lists('ReportTypeList.Type')
@api_action('Reports', 10, 45)
- def get_report_schedule_list(self, path, response, **kw):
+ def get_report_schedule_list(self, request, response, **kw):
"""Returns a list of order report requests that are scheduled
to be submitted to Amazon MWS for processing.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['NextToken'])
@api_action('Reports', 0, 0)
- def get_report_schedule_list_by_next_token(self, path, response, **kw):
+ def get_report_schedule_list_by_next_token(self, request, response, **kw):
"""Returns a list of report requests using the NextToken,
which was supplied by a previous request to either
GetReportScheduleListByNextToken or GetReportScheduleList,
where the value of HasNext was true in that previous request.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@structured_lists('ReportTypeList.Type')
@api_action('Reports', 10, 45)
- def get_report_schedule_count(self, path, response, **kw):
+ def get_report_schedule_count(self, request, response, **kw):
"""Returns a count of order report requests that are scheduled
to be submitted to Amazon MWS.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
- @boolean_arguments('Acknowledged')
@requires(['ReportIdList'])
+ @boolean_arguments('Acknowledged')
@structured_lists('ReportIdList.Id')
@api_action('Reports', 10, 45)
- def update_report_acknowledgements(self, path, response, **kw):
+ def update_report_acknowledgements(self, request, response, **kw):
"""Updates the acknowledged status of one or more reports.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['ShipFromAddress', 'InboundShipmentPlanRequestItems'])
@structured_objects('ShipFromAddress', 'InboundShipmentPlanRequestItems')
@api_action('Inbound', 30, 0.5)
- def create_inbound_shipment_plan(self, path, response, **kw):
+ def create_inbound_shipment_plan(self, request, response, **kw):
"""Returns the information required to create an inbound shipment.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['ShipmentId', 'InboundShipmentHeader', 'InboundShipmentItems'])
@structured_objects('InboundShipmentHeader', 'InboundShipmentItems')
@api_action('Inbound', 30, 0.5)
- def create_inbound_shipment(self, path, response, **kw):
+ def create_inbound_shipment(self, request, response, **kw):
"""Creates an inbound shipment.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['ShipmentId'])
@structured_objects('InboundShipmentHeader', 'InboundShipmentItems')
@api_action('Inbound', 30, 0.5)
- def update_inbound_shipment(self, path, response, **kw):
+ def update_inbound_shipment(self, request, response, **kw):
"""Updates an existing inbound shipment. Amazon documentation
is ambiguous as to whether the InboundShipmentHeader and
InboundShipmentItems arguments are required.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires_some_of('ShipmentIdList', 'ShipmentStatusList')
@structured_lists('ShipmentIdList.Id', 'ShipmentStatusList.Status')
@api_action('Inbound', 30, 0.5)
- def list_inbound_shipments(self, path, response, **kw):
+ def list_inbound_shipments(self, request, response, **kw):
"""Returns a list of inbound shipments based on criteria that
you specify.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['NextToken'])
@api_action('Inbound', 30, 0.5)
- def list_inbound_shipments_by_next_token(self, path, response, **kw):
+ def list_inbound_shipments_by_next_token(self, request, response, **kw):
"""Returns the next page of inbound shipments using the NextToken
parameter.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['ShipmentId'], ['LastUpdatedAfter', 'LastUpdatedBefore'])
@api_action('Inbound', 30, 0.5)
- def list_inbound_shipment_items(self, path, response, **kw):
+ def list_inbound_shipment_items(self, request, response, **kw):
"""Returns a list of items in a specified inbound shipment, or a
list of items that were updated within a specified time frame.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['NextToken'])
@api_action('Inbound', 30, 0.5)
- def list_inbound_shipment_items_by_next_token(self, path, response, **kw):
+ def list_inbound_shipment_items_by_next_token(self, request, response, **kw):
"""Returns the next page of inbound shipment items using the
NextToken parameter.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@api_action('Inbound', 2, 300, 'GetServiceStatus')
- def get_inbound_service_status(self, path, response, **kw):
+ def get_inbound_service_status(self, request, response, **kw):
"""Returns the operational status of the Fulfillment Inbound
Shipment API section.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['SellerSkus'], ['QueryStartDateTime'])
@structured_lists('SellerSkus.member')
@api_action('Inventory', 30, 0.5)
- def list_inventory_supply(self, path, response, **kw):
+ def list_inventory_supply(self, request, response, **kw):
"""Returns information about the availability of a seller's
inventory.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['NextToken'])
@api_action('Inventory', 30, 0.5)
- def list_inventory_supply_by_next_token(self, path, response, **kw):
+ def list_inventory_supply_by_next_token(self, request, response, **kw):
"""Returns the next page of information about the availability
of a seller's inventory using the NextToken parameter.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@api_action('Inventory', 2, 300, 'GetServiceStatus')
- def get_inventory_service_status(self, path, response, **kw):
+ def get_inventory_service_status(self, request, response, **kw):
"""Returns the operational status of the Fulfillment Inventory
API section.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['PackageNumber'])
@api_action('Outbound', 30, 0.5)
- def get_package_tracking_details(self, path, response, **kw):
+ def get_package_tracking_details(self, request, response, **kw):
"""Returns delivery tracking information for a package in
an outbound shipment for a Multi-Channel Fulfillment order.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
- @structured_objects('Address', 'Items')
@requires(['Address', 'Items'])
+ @structured_objects('Address', 'Items')
@api_action('Outbound', 30, 0.5)
- def get_fulfillment_preview(self, path, response, **kw):
+ def get_fulfillment_preview(self, request, response, **kw):
"""Returns a list of fulfillment order previews based on items
and shipping speed categories that you specify.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
- @structured_objects('DestinationAddress', 'Items')
@requires(['SellerFulfillmentOrderId', 'DisplayableOrderId',
'ShippingSpeedCategory', 'DisplayableOrderDateTime',
'DestinationAddress', 'DisplayableOrderComment',
'Items'])
+ @structured_objects('DestinationAddress', 'Items')
@api_action('Outbound', 30, 0.5)
- def create_fulfillment_order(self, path, response, **kw):
+ def create_fulfillment_order(self, request, response, **kw):
"""Requests that Amazon ship items from the seller's inventory
to a destination address.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['SellerFulfillmentOrderId'])
@api_action('Outbound', 30, 0.5)
- def get_fulfillment_order(self, path, response, **kw):
+ def get_fulfillment_order(self, request, response, **kw):
"""Returns a fulfillment order based on a specified
SellerFulfillmentOrderId.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@api_action('Outbound', 30, 0.5)
- def list_all_fulfillment_orders(self, path, response, **kw):
+ def list_all_fulfillment_orders(self, request, response, **kw):
"""Returns a list of fulfillment orders fulfilled after (or
at) a specified date or by fulfillment method.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['NextToken'])
@api_action('Outbound', 30, 0.5)
- def list_all_fulfillment_orders_by_next_token(self, path, response, **kw):
+ def list_all_fulfillment_orders_by_next_token(self, request, response, **kw):
"""Returns the next page of inbound shipment items using the
NextToken parameter.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['SellerFulfillmentOrderId'])
@api_action('Outbound', 30, 0.5)
- def cancel_fulfillment_order(self, path, response, **kw):
+ def cancel_fulfillment_order(self, request, response, **kw):
"""Requests that Amazon stop attempting to fulfill an existing
fulfillment order.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@api_action('Outbound', 2, 300, 'GetServiceStatus')
- def get_outbound_service_status(self, path, response, **kw):
+ def get_outbound_service_status(self, request, response, **kw):
"""Returns the operational status of the Fulfillment Outbound
API section.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['CreatedAfter'], ['LastUpdatedAfter'])
+ @requires(['MarketplaceId'])
@exclusive(['CreatedAfter'], ['LastUpdatedAfter'])
@dependent('CreatedBefore', ['CreatedAfter'])
@exclusive(['LastUpdatedAfter'], ['BuyerEmail'], ['SellerOrderId'])
@dependent('LastUpdatedBefore', ['LastUpdatedAfter'])
@exclusive(['CreatedAfter'], ['LastUpdatedBefore'])
- @requires(['MarketplaceId'])
@structured_objects('OrderTotal', 'ShippingAddress',
'PaymentExecutionDetail')
@structured_lists('MarketplaceId.Id', 'OrderStatus.Status',
'FulfillmentChannel.Channel', 'PaymentMethod.')
@api_action('Orders', 6, 60)
- def list_orders(self, path, response, **kw):
+ def list_orders(self, request, response, **kw):
"""Returns a list of orders created or updated during a time
frame that you specify.
"""
@@ -681,149 +725,444 @@ class MWSConnection(AWSQueryConnection):
'BuyerEmail': toggle.union(['SellerOrderId']),
'SellerOrderId': toggle.union(['BuyerEmail']),
}.items():
- if do in kw and filter(kw.has_key, dont):
+ if do in kw and any(i in dont for i in kw):
message = "Don't include {0} when specifying " \
"{1}".format(' or '.join(dont), do)
raise AssertionError(message)
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['NextToken'])
@api_action('Orders', 6, 60)
- def list_orders_by_next_token(self, path, response, **kw):
+ def list_orders_by_next_token(self, request, response, **kw):
"""Returns the next page of orders using the NextToken value
that was returned by your previous request to either
ListOrders or ListOrdersByNextToken.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['AmazonOrderId'])
@structured_lists('AmazonOrderId.Id')
@api_action('Orders', 6, 60)
- def get_order(self, path, response, **kw):
+ def get_order(self, request, response, **kw):
"""Returns an order for each AmazonOrderId that you specify.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['AmazonOrderId'])
@api_action('Orders', 30, 2)
- def list_order_items(self, path, response, **kw):
+ def list_order_items(self, request, response, **kw):
"""Returns order item information for an AmazonOrderId that
you specify.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['NextToken'])
@api_action('Orders', 30, 2)
- def list_order_items_by_next_token(self, path, response, **kw):
+ def list_order_items_by_next_token(self, request, response, **kw):
"""Returns the next page of order items using the NextToken
value that was returned by your previous request to either
ListOrderItems or ListOrderItemsByNextToken.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@api_action('Orders', 2, 300, 'GetServiceStatus')
- def get_orders_service_status(self, path, response, **kw):
+ def get_orders_service_status(self, request, response, **kw):
"""Returns the operational status of the Orders API section.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['MarketplaceId', 'Query'])
@api_action('Products', 20, 20)
- def list_matching_products(self, path, response, **kw):
+ def list_matching_products(self, request, response, **kw):
"""Returns a list of products and their attributes, ordered
by relevancy, based on a search query that you specify.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['MarketplaceId', 'ASINList'])
@structured_lists('ASINList.ASIN')
@api_action('Products', 20, 20)
- def get_matching_product(self, path, response, **kw):
+ def get_matching_product(self, request, response, **kw):
"""Returns a list of products and their attributes, based on
a list of ASIN values that you specify.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['MarketplaceId', 'IdType', 'IdList'])
@structured_lists('IdList.Id')
@api_action('Products', 20, 20)
- def get_matching_product_for_id(self, path, response, **kw):
+ def get_matching_product_for_id(self, request, response, **kw):
"""Returns a list of products and their attributes, based on
a list of Product IDs that you specify.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['MarketplaceId', 'SellerSKUList'])
@structured_lists('SellerSKUList.SellerSKU')
@api_action('Products', 20, 10, 'GetCompetitivePricingForSKU')
- def get_competitive_pricing_for_sku(self, path, response, **kw):
+ def get_competitive_pricing_for_sku(self, request, response, **kw):
"""Returns the current competitive pricing of a product,
based on the SellerSKUs and MarketplaceId that you specify.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['MarketplaceId', 'ASINList'])
@structured_lists('ASINList.ASIN')
@api_action('Products', 20, 10, 'GetCompetitivePricingForASIN')
- def get_competitive_pricing_for_asin(self, path, response, **kw):
+ def get_competitive_pricing_for_asin(self, request, response, **kw):
"""Returns the current competitive pricing of a product,
based on the ASINs and MarketplaceId that you specify.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['MarketplaceId', 'SellerSKUList'])
@structured_lists('SellerSKUList.SellerSKU')
@api_action('Products', 20, 5, 'GetLowestOfferListingsForSKU')
- def get_lowest_offer_listings_for_sku(self, path, response, **kw):
+ def get_lowest_offer_listings_for_sku(self, request, response, **kw):
"""Returns the lowest price offer listings for a specific
product by item condition and SellerSKUs.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['MarketplaceId', 'ASINList'])
@structured_lists('ASINList.ASIN')
@api_action('Products', 20, 5, 'GetLowestOfferListingsForASIN')
- def get_lowest_offer_listings_for_asin(self, path, response, **kw):
+ def get_lowest_offer_listings_for_asin(self, request, response, **kw):
"""Returns the lowest price offer listings for a specific
product by item condition and ASINs.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['MarketplaceId', 'SellerSKU'])
@api_action('Products', 20, 20, 'GetProductCategoriesForSKU')
- def get_product_categories_for_sku(self, path, response, **kw):
+ def get_product_categories_for_sku(self, request, response, **kw):
"""Returns the product categories that a SellerSKU belongs to.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['MarketplaceId', 'ASIN'])
@api_action('Products', 20, 20, 'GetProductCategoriesForASIN')
- def get_product_categories_for_asin(self, path, response, **kw):
+ def get_product_categories_for_asin(self, request, response, **kw):
"""Returns the product categories that an ASIN belongs to.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@api_action('Products', 2, 300, 'GetServiceStatus')
- def get_products_service_status(self, path, response, **kw):
+ def get_products_service_status(self, request, response, **kw):
"""Returns the operational status of the Products API section.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
+
+ @requires(['MarketplaceId', 'SellerSKUList'])
+ @structured_lists('SellerSKUList.SellerSKU')
+ @api_action('Products', 20, 10, 'GetMyPriceForSKU')
+ def get_my_price_for_sku(self, request, response, **kw):
+ """Returns pricing information for your own offer listings, based on SellerSKU.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['MarketplaceId', 'ASINList'])
+ @structured_lists('ASINList.ASIN')
+ @api_action('Products', 20, 10, 'GetMyPriceForASIN')
+ def get_my_price_for_asin(self, request, response, **kw):
+ """Returns pricing information for your own offer listings, based on ASIN.
+ """
+ return self._post_request(request, kw, response)
@api_action('Sellers', 15, 60)
- def list_marketplace_participations(self, path, response, **kw):
+ def list_marketplace_participations(self, request, response, **kw):
"""Returns a list of marketplaces that the seller submitting
the request can sell in, and a list of participations that
include seller-specific information in that marketplace.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
@requires(['NextToken'])
@api_action('Sellers', 15, 60)
- def list_marketplace_participations_by_next_token(self, path, response,
+ def list_marketplace_participations_by_next_token(self, request, response,
**kw):
"""Returns the next page of marketplaces and participations
using the NextToken value that was returned by your
previous request to either ListMarketplaceParticipations
or ListMarketplaceParticipationsByNextToken.
"""
- return self.post_request(path, kw, response)
+ return self._post_request(request, kw, response)
+
+ @requires(['MarketplaceId'])
+ @api_action('Recommendations', 5, 2)
+ def get_last_updated_time_for_recommendations(self, request, response,
+ **kw):
+ """Checks whether there are active recommendations for each category
+ for the given marketplace, and if there are, returns the time when
+ recommendations were last updated for each category.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['MarketplaceId'])
+ @structured_lists('CategoryQueryList.CategoryQuery')
+ @api_action('Recommendations', 5, 2)
+ def list_recommendations(self, request, response, **kw):
+ """Returns your active recommendations for a specific category or for
+ all categories for a specific marketplace.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['NextToken'])
+ @api_action('Recommendations', 5, 2)
+ def list_recommendations_by_next_token(self, request, response, **kw):
+ """Returns the next page of recommendations using the NextToken
+ parameter.
+ """
+ return self._post_request(request, kw, response)
+
+ @api_action('Recommendations', 2, 300, 'GetServiceStatus')
+ def get_recommendations_service_status(self, request, response, **kw):
+ """Returns the operational status of the Recommendations API section.
+ """
+ return self._post_request(request, kw, response)
+
+ @api_action('CustomerInfo', 15, 12)
+ def list_customers(self, request, response, **kw):
+ """Returns a list of customer accounts based on search criteria that
+ you specify.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['NextToken'])
+ @api_action('CustomerInfo', 50, 3)
+ def list_customers_by_next_token(self, request, response, **kw):
+ """Returns the next page of customers using the NextToken parameter.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['CustomerIdList'])
+ @structured_lists('CustomerIdList.CustomerId')
+ @api_action('CustomerInfo', 15, 12)
+ def get_customers_for_customer_id(self, request, response, **kw):
+ """Returns a list of customer accounts based on search criteria that
+ you specify.
+ """
+ return self._post_request(request, kw, response)
+
+ @api_action('CustomerInfo', 2, 300, 'GetServiceStatus')
+ def get_customerinfo_service_status(self, request, response, **kw):
+ """Returns the operational status of the Customer Information API
+ section.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['DateRangeStart'])
+ @api_action('CartInfo', 15, 12)
+ def list_carts(self, request, response, **kw):
+ """Returns a list of shopping carts in your Webstore that were last
+ updated during the time range that you specify.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['NextToken'])
+ @api_action('CartInfo', 50, 3)
+ def list_carts_by_next_token(self, request, response, **kw):
+ """Returns the next page of shopping carts using the NextToken
+ parameter.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['CartIdList'])
+ @structured_lists('CartIdList.CartId')
+ @api_action('CartInfo', 15, 12)
+ def get_carts(self, request, response, **kw):
+ """Returns shopping carts based on the CartId values that you specify.
+ """
+ return self._post_request(request, kw, response)
+
+ @api_action('CartInfo', 2, 300, 'GetServiceStatus')
+ def get_cartinfo_service_status(self, request, response, **kw):
+ """Returns the operational status of the Cart Information API section.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['MarketplaceId', 'Destination'])
+ @structured_objects('Destination', members=True)
+ @api_action('Subscriptions', 25, 0.5)
+ def register_destination(self, request, response, **kw):
+ """Specifies a new destination where you want to receive notifications.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['MarketplaceId', 'Destination'])
+ @structured_objects('Destination', members=True)
+ @api_action('Subscriptions', 25, 0.5)
+ def deregister_destination(self, request, response, **kw):
+ """Removes an existing destination from the list of registered
+ destinations.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['MarketplaceId'])
+ @api_action('Subscriptions', 25, 0.5)
+ def list_registered_destinations(self, request, response, **kw):
+ """Lists all current destinations that you have registered.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['MarketplaceId', 'Destination'])
+ @structured_objects('Destination', members=True)
+ @api_action('Subscriptions', 25, 0.5)
+ def send_test_notification_to_destination(self, request, response, **kw):
+ """Sends a test notification to an existing destination.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['MarketplaceId', 'Subscription'])
+ @structured_objects('Subscription', members=True)
+ @api_action('Subscriptions', 25, 0.5)
+ def create_subscription(self, request, response, **kw):
+ """Creates a new subscription for the specified notification type
+ and destination.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['MarketplaceId', 'NotificationType', 'Destination'])
+ @structured_objects('Destination', members=True)
+ @api_action('Subscriptions', 25, 0.5)
+ def get_subscription(self, request, response, **kw):
+ """Gets the subscription for the specified notification type and
+ destination.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['MarketplaceId', 'NotificationType', 'Destination'])
+ @structured_objects('Destination', members=True)
+ @api_action('Subscriptions', 25, 0.5)
+ def delete_subscription(self, request, response, **kw):
+ """Deletes the subscription for the specified notification type and
+ destination.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['MarketplaceId'])
+ @api_action('Subscriptions', 25, 0.5)
+ def list_subscriptions(self, request, response, **kw):
+ """Returns a list of all your current subscriptions.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['MarketplaceId', 'Subscription'])
+ @structured_objects('Subscription', members=True)
+ @api_action('Subscriptions', 25, 0.5)
+ def update_subscription(self, request, response, **kw):
+ """Updates the subscription for the specified notification type and
+ destination.
+ """
+ return self._post_request(request, kw, response)
+
+ @api_action('Subscriptions', 2, 300, 'GetServiceStatus')
+ def get_subscriptions_service_status(self, request, response, **kw):
+ """Returns the operational status of the Subscriptions API section.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['AmazonOrderReferenceId', 'OrderReferenceAttributes'])
+ @structured_objects('OrderReferenceAttributes')
+ @api_action('OffAmazonPayments', 10, 1)
+ def set_order_reference_details(self, request, response, **kw):
+ """Sets order reference details such as the order total and a
+ description for the order.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['AmazonOrderReferenceId'])
+ @api_action('OffAmazonPayments', 20, 2)
+ def get_order_reference_details(self, request, response, **kw):
+ """Returns details about the Order Reference object and its current
+ state.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['AmazonOrderReferenceId'])
+ @api_action('OffAmazonPayments', 10, 1)
+ def confirm_order_reference(self, request, response, **kw):
+ """Confirms that the order reference is free of constraints and all
+ required information has been set on the order reference.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['AmazonOrderReferenceId'])
+ @api_action('OffAmazonPayments', 10, 1)
+ def cancel_order_reference(self, request, response, **kw):
+ """Cancel an order reference; all authorizations associated with
+ this order reference are also closed.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['AmazonOrderReferenceId'])
+ @api_action('OffAmazonPayments', 10, 1)
+ def close_order_reference(self, request, response, **kw):
+ """Confirms that an order reference has been fulfilled (fully
+ or partially) and that you do not expect to create any new
+ authorizations on this order reference.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['AmazonOrderReferenceId', 'AuthorizationReferenceId',
+ 'AuthorizationAmount'])
+ @structured_objects('AuthorizationAmount')
+ @api_action('OffAmazonPayments', 10, 1)
+ def authorize(self, request, response, **kw):
+ """Reserves a specified amount against the payment method(s) stored in
+ the order reference.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['AmazonAuthorizationId'])
+ @api_action('OffAmazonPayments', 20, 2)
+ def get_authorization_details(self, request, response, **kw):
+ """Returns the status of a particular authorization and the total
+ amount captured on the authorization.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['AmazonAuthorizationId', 'CaptureReferenceId', 'CaptureAmount'])
+ @structured_objects('CaptureAmount')
+ @api_action('OffAmazonPayments', 10, 1)
+ def capture(self, request, response, **kw):
+ """Captures funds from an authorized payment instrument.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['AmazonCaptureId'])
+ @api_action('OffAmazonPayments', 20, 2)
+ def get_capture_details(self, request, response, **kw):
+ """Returns the status of a particular capture and the total amount
+ refunded on the capture.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['AmazonAuthorizationId'])
+ @api_action('OffAmazonPayments', 10, 1)
+ def close_authorization(self, request, response, **kw):
+ """Closes an authorization.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['AmazonCaptureId', 'RefundReferenceId', 'RefundAmount'])
+ @structured_objects('RefundAmount')
+ @api_action('OffAmazonPayments', 10, 1)
+ def refund(self, request, response, **kw):
+ """Refunds a previously captured amount.
+ """
+ return self._post_request(request, kw, response)
+
+ @requires(['AmazonRefundId'])
+ @api_action('OffAmazonPayments', 20, 2)
+ def get_refund_details(self, request, response, **kw):
+ """Returns the status of a particular refund.
+ """
+ return self._post_request(request, kw, response)
+
+ @api_action('OffAmazonPayments', 2, 300, 'GetServiceStatus')
+ def get_offamazonpayments_service_status(self, request, response, **kw):
+ """Returns the operational status of the Off-Amazon Payments API
+ section.
+ """
+ return self._post_request(request, kw, response)
diff --git a/awx/lib/site-packages/boto/mws/exception.py b/awx/lib/site-packages/boto/mws/exception.py
index d84df4a853..fba8a5d5d2 100644
--- a/awx/lib/site-packages/boto/mws/exception.py
+++ b/awx/lib/site-packages/boto/mws/exception.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2012 Andy Davidoff http://www.disruptek.com/
+# Copyright (c) 2012-2014 Andy Davidoff http://www.disruptek.com/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
@@ -19,19 +19,16 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from boto.exception import BotoServerError
+from boto.mws.response import ResponseFactory
-class ResponseErrorFactory(BotoServerError):
+class ResponseErrorFactory(ResponseFactory):
- def __new__(cls, *args, **kw):
- error = BotoServerError(*args, **kw)
- try:
- newclass = globals()[error.error_code]
- except KeyError:
- newclass = ResponseError
- obj = newclass.__new__(newclass, *args, **kw)
- obj.__dict__.update(error.__dict__)
- return obj
+ def __call__(self, status, reason, body=None):
+ server = BotoServerError(status, reason, body=body)
+ supplied = self.find_element(server.error_code, '', ResponseError)
+ print(supplied.__name__)
+ return supplied(status, reason, body=body)
class ResponseError(BotoServerError):
@@ -41,16 +38,14 @@ class ResponseError(BotoServerError):
retry = False
def __repr__(self):
- return '{0}({1}, {2},\n\t{3})'.format(self.__class__.__name__,
- self.status, self.reason,
- self.error_message)
+ return '{0.__name__}({1.reason}: "{1.message}")' \
+ .format(self.__class__, self)
def __str__(self):
- return 'MWS Response Error: {0.status} {0.__class__.__name__} {1}\n' \
- '{2}\n' \
- '{0.error_message}'.format(self,
- self.retry and '(Retriable)' or '',
- self.__doc__.strip())
+ doc = self.__doc__ and self.__doc__.strip() + "\n" or ''
+ return '{1.__name__}: {0.reason} {2}\n{3}' \
+ '{0.message}'.format(self, self.__class__,
+ self.retry and '(Retriable)' or '', doc)
class RetriableResponseError(ResponseError):
diff --git a/awx/lib/site-packages/boto/mws/response.py b/awx/lib/site-packages/boto/mws/response.py
index 0960e46e5f..7e2e23c07e 100644
--- a/awx/lib/site-packages/boto/mws/response.py
+++ b/awx/lib/site-packages/boto/mws/response.py
@@ -1,24 +1,23 @@
-# Copyright (c) 2012 Andy Davidoff http://www.disruptek.com/
+# Copyright (c) 2012-2014 Andy Davidoff http://www.disruptek.com/
#
-# Permission is hereby granted, free of charge, to any person obtaining a
-# copy of this software and associated documentation files (the
-# "Software"), to deal in the Software without restriction, including
-# without limitation the rights to use, copy, modify, merge, publish, dis-
-# tribute, sublicense, and/or sell copies of the Software, and to permit
-# persons to whom the Software is furnished to do so, subject to the fol-
-# lowing conditions:
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, dis- tribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the fol- lowing conditions:
#
-# The above copyright notice and this permission notice shall be included
-# in all copies or substantial portions of the Software.
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
-# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-# IN THE SOFTWARE.
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- ITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from decimal import Decimal
+from boto.compat import filter, map
class ComplexType(dict):
@@ -62,10 +61,10 @@ class DeclarativeType(object):
setattr(self._parent, self._name, self._clone)
def start(self, *args, **kw):
- raise NotImplemented
+ raise NotImplementedError
def end(self, *args, **kw):
- raise NotImplemented
+ raise NotImplementedError
def teardown(self, *args, **kw):
setattr(self._parent, self._name, self._value)
@@ -133,14 +132,40 @@ class MemberList(Element):
super(MemberList, self).teardown(*args, **kw)
-def ResponseFactory(action, force=None):
- result = force or globals().get(action + 'Result', ResponseElement)
+class ResponseFactory(object):
+ def __init__(self, scopes=None):
+ self.scopes = [] if scopes is None else scopes
- class MWSResponse(Response):
- _name = action + 'Response'
+ def element_factory(self, name, parent):
+ class DynamicElement(parent):
+ _name = name
+ setattr(DynamicElement, '__name__', str(name))
+ return DynamicElement
- setattr(MWSResponse, action + 'Result', Element(result))
- return MWSResponse
+ def search_scopes(self, key):
+ for scope in self.scopes:
+ if hasattr(scope, key):
+ return getattr(scope, key)
+ if hasattr(scope, '__getitem__'):
+ if key in scope:
+ return scope[key]
+
+ def find_element(self, action, suffix, parent):
+ element = self.search_scopes(action + suffix)
+ if element is not None:
+ return element
+ if action.endswith('ByNextToken'):
+ element = self.search_scopes(action[:-len('ByNextToken')] + suffix)
+ if element is not None:
+ return self.element_factory(action + suffix, element)
+ return self.element_factory(action + suffix, parent)
+
+ def __call__(self, action, connection=None):
+ response = self.find_element(action, 'Response', Response)
+ if not hasattr(response, action + 'Result'):
+ result = self.find_element(action, 'Result', ResponseElement)
+ setattr(response, action + 'Result', Element(result))
+ return response(connection=connection)
def strip_namespace(func):
@@ -191,8 +216,6 @@ class ResponseElement(dict):
name = self.__class__.__name__
if name.startswith('JIT_'):
name = '^{0}^'.format(self._name or '')
- elif name == 'MWSResponse':
- name = '^{0}^'.format(self._name or name)
return '{0}{1!r}({2})'.format(
name, self.copy(), ', '.join(map(render, attrs)))
@@ -262,10 +285,6 @@ class GetFeedSubmissionListResult(ResponseElement):
FeedSubmissionInfo = ElementList(FeedSubmissionInfo)
-class GetFeedSubmissionListByNextTokenResult(GetFeedSubmissionListResult):
- pass
-
-
class GetFeedSubmissionCountResult(ResponseElement):
pass
@@ -290,10 +309,6 @@ class GetReportRequestListResult(RequestReportResult):
ReportRequestInfo = ElementList()
-class GetReportRequestListByNextTokenResult(GetReportRequestListResult):
- pass
-
-
class CancelReportRequestsResult(RequestReportResult):
pass
@@ -302,10 +317,6 @@ class GetReportListResult(ResponseElement):
ReportInfo = ElementList()
-class GetReportListByNextTokenResult(GetReportListResult):
- pass
-
-
class ManageReportScheduleResult(ResponseElement):
ReportSchedule = Element()
@@ -314,10 +325,6 @@ class GetReportScheduleListResult(ManageReportScheduleResult):
pass
-class GetReportScheduleListByNextTokenResult(GetReportScheduleListResult):
- pass
-
-
class UpdateReportAcknowledgementsResult(GetReportListResult):
pass
@@ -331,18 +338,10 @@ class ListInboundShipmentsResult(ResponseElement):
ShipmentData = MemberList(ShipFromAddress=Element())
-class ListInboundShipmentsByNextTokenResult(ListInboundShipmentsResult):
- pass
-
-
class ListInboundShipmentItemsResult(ResponseElement):
ItemData = MemberList()
-class ListInboundShipmentItemsByNextTokenResult(ListInboundShipmentItemsResult):
- pass
-
-
class ListInventorySupplyResult(ResponseElement):
InventorySupplyList = MemberList(
EarliestAvailability=Element(),
@@ -353,10 +352,6 @@ class ListInventorySupplyResult(ResponseElement):
)
-class ListInventorySupplyByNextTokenResult(ListInventorySupplyResult):
- pass
-
-
class ComplexAmount(ResponseElement):
_amount = 'Value'
@@ -472,10 +467,6 @@ class ListAllFulfillmentOrdersResult(ResponseElement):
FulfillmentOrders = MemberList(FulfillmentOrder)
-class ListAllFulfillmentOrdersByNextTokenResult(ListAllFulfillmentOrdersResult):
- pass
-
-
class GetPackageTrackingDetailsResult(ResponseElement):
ShipToAddress = Element()
TrackingEvents = MemberList(EventAddress=Element())
@@ -541,6 +532,11 @@ class LowestOfferListing(ResponseElement):
Price = Element(Price)
+class Offer(ResponseElement):
+ BuyingPrice = Element(Price)
+ RegularPrice = Element(ComplexMoney)
+
+
class Product(ResponseElement):
_namespace = 'ns2'
Identifiers = Element(MarketplaceASIN=Element(),
@@ -558,6 +554,9 @@ class Product(ResponseElement):
LowestOfferListings = Element(
LowestOfferListing=ElementList(LowestOfferListing),
)
+ Offers = Element(
+ Offer=ElementList(Offer),
+ )
class ListMatchingProductsResult(ResponseElement):
@@ -601,6 +600,14 @@ class GetLowestOfferListingsForASINResponse(ProductsBulkOperationResponse):
pass
+class GetMyPriceForSKUResponse(ProductsBulkOperationResponse):
+ pass
+
+
+class GetMyPriceForASINResponse(ProductsBulkOperationResponse):
+ pass
+
+
class ProductCategory(ResponseElement):
def __init__(self, *args, **kw):
@@ -636,10 +643,6 @@ class ListOrdersResult(ResponseElement):
Orders = Element(Order=ElementList(Order))
-class ListOrdersByNextTokenResult(ListOrdersResult):
- pass
-
-
class GetOrderResult(ListOrdersResult):
pass
@@ -667,5 +670,118 @@ class ListMarketplaceParticipationsResult(ResponseElement):
ListMarketplaces = Element(Marketplace=ElementList())
-class ListMarketplaceParticipationsByNextTokenResult(ListMarketplaceParticipationsResult):
+class ListRecommendationsResult(ResponseElement):
+ ListingQualityRecommendations = MemberList(ItemIdentifier=Element())
+
+
+class Customer(ResponseElement):
+ PrimaryContactInfo = Element()
+ ShippingAddressList = Element(ShippingAddress=ElementList())
+ AssociatedMarketplaces = Element(MarketplaceDomain=ElementList())
+
+
+class ListCustomersResult(ResponseElement):
+ CustomerList = Element(Customer=ElementList(Customer))
+
+
+class GetCustomersForCustomerIdResult(ListCustomersResult):
+ pass
+
+
+class CartItem(ResponseElement):
+ CurrentPrice = Element(ComplexMoney)
+ SalePrice = Element(ComplexMoney)
+
+
+class Cart(ResponseElement):
+ ActiveCartItemList = Element(CartItem=ElementList(CartItem))
+ SavedCartItemList = Element(CartItem=ElementList(CartItem))
+
+
+class ListCartsResult(ResponseElement):
+ CartList = Element(Cart=ElementList(Cart))
+
+
+class GetCartsResult(ListCartsResult):
+ pass
+
+
+class Destination(ResponseElement):
+ AttributeList = MemberList()
+
+
+class ListRegisteredDestinationsResult(ResponseElement):
+ DestinationList = MemberList(Destination)
+
+
+class Subscription(ResponseElement):
+ Destination = Element(Destination)
+
+
+class GetSubscriptionResult(ResponseElement):
+ Subscription = Element(Subscription)
+
+
+class ListSubscriptionsResult(ResponseElement):
+ SubscriptionList = MemberList(Subscription)
+
+
+class OrderReferenceDetails(ResponseElement):
+ Buyer = Element()
+ OrderTotal = Element(ComplexMoney)
+ Destination = Element(PhysicalDestination=Element())
+ SellerOrderAttributes = Element()
+ OrderReferenceStatus = Element()
+ Constraints = ElementList()
+
+
+class SetOrderReferenceDetailsResult(ResponseElement):
+ OrderReferenceDetails = Element(OrderReferenceDetails)
+
+
+class GetOrderReferenceDetailsResult(SetOrderReferenceDetailsResult):
+ pass
+
+
+class AuthorizationDetails(ResponseElement):
+ AuthorizationAmount = Element(ComplexMoney)
+ CapturedAmount = Element(ComplexMoney)
+ AuthorizationFee = Element(ComplexMoney)
+ AuthorizationStatus = Element()
+
+
+class AuthorizeResult(ResponseElement):
+ AuthorizationDetails = Element(AuthorizationDetails)
+
+
+class GetAuthorizationDetailsResult(AuthorizeResult):
+ pass
+
+
+class CaptureDetails(ResponseElement):
+ CaptureAmount = Element(ComplexMoney)
+ RefundedAmount = Element(ComplexMoney)
+ CaptureFee = Element(ComplexMoney)
+ CaptureStatus = Element()
+
+
+class CaptureResult(ResponseElement):
+ CaptureDetails = Element(CaptureDetails)
+
+
+class GetCaptureDetailsResult(CaptureResult):
+ pass
+
+
+class RefundDetails(ResponseElement):
+ RefundAmount = Element(ComplexMoney)
+ FeeRefunded = Element(ComplexMoney)
+ RefundStatus = Element()
+
+
+class RefundResult(ResponseElement):
+ RefundDetails = Element(RefundDetails)
+
+
+class GetRefundDetails(RefundResult):
pass
diff --git a/awx/lib/site-packages/boto/opsworks/__init__.py b/awx/lib/site-packages/boto/opsworks/__init__.py
index 71bc720953..1ff5c0f6cf 100644
--- a/awx/lib/site-packages/boto/opsworks/__init__.py
+++ b/awx/lib/site-packages/boto/opsworks/__init__.py
@@ -25,7 +25,7 @@ from boto.regioninfo import RegionInfo, get_regions
def regions():
"""
- Get all available regions for the Amazon Kinesis service.
+ Get all available regions for the Amazon OpsWorks service.
:rtype: list
:return: A list of :class:`boto.regioninfo.RegionInfo`
diff --git a/awx/lib/site-packages/boto/opsworks/layer1.py b/awx/lib/site-packages/boto/opsworks/layer1.py
index 6e8d24ba59..3b703ee164 100644
--- a/awx/lib/site-packages/boto/opsworks/layer1.py
+++ b/awx/lib/site-packages/boto/opsworks/layer1.py
@@ -20,16 +20,13 @@
# IN THE SOFTWARE.
#
-try:
- import json
-except ImportError:
- import simplejson as json
import boto
from boto.connection import AWSQueryConnection
from boto.regioninfo import RegionInfo
from boto.exception import JSONResponseError
from boto.opsworks import exceptions
+from boto.compat import json
class OpsWorksConnection(AWSQueryConnection):
@@ -91,7 +88,7 @@ class OpsWorksConnection(AWSQueryConnection):
def __init__(self, **kwargs):
- region = kwargs.get('region')
+ region = kwargs.pop('region', None)
if not region:
region = RegionInfo(self, self.DefaultRegionName,
self.DefaultRegionEndpoint)
@@ -2580,7 +2577,7 @@ class OpsWorksConnection(AWSQueryConnection):
headers=headers, data=body)
response = self._mexe(http_request, sender=None,
override_num_retries=10)
- response_body = response.read()
+ response_body = response.read().decode('utf-8')
boto.log.debug(response_body)
if response.status == 200:
if response_body:
@@ -2591,4 +2588,3 @@ class OpsWorksConnection(AWSQueryConnection):
exception_class = self._faults.get(fault_name, self.ResponseError)
raise exception_class(response.status, response.reason,
body=json_body)
-
diff --git a/awx/lib/site-packages/boto/provider.py b/awx/lib/site-packages/boto/provider.py
index 2febdc9933..8e04bff328 100644
--- a/awx/lib/site-packages/boto/provider.py
+++ b/awx/lib/site-packages/boto/provider.py
@@ -27,10 +27,13 @@ This class encapsulates the provider-specific header differences.
"""
import os
+from boto.compat import six
from datetime import datetime
import boto
from boto import config
+from boto.compat import expanduser
+from boto.pyami.config import Config
from boto.gs.acl import ACL
from boto.gs.acl import CannedACLStrings as CannedGSACLStrings
from boto.s3.acl import CannedACLStrings as CannedS3ACLStrings
@@ -66,13 +69,16 @@ STORAGE_PERMISSIONS_ERROR = 'StoragePermissionsError'
STORAGE_RESPONSE_ERROR = 'StorageResponseError'
+class ProfileNotFoundError(ValueError): pass
+
+
class Provider(object):
CredentialMap = {
'aws': ('aws_access_key_id', 'aws_secret_access_key',
- 'aws_security_token'),
+ 'aws_security_token', 'aws_profile'),
'google': ('gs_access_key_id', 'gs_secret_access_key',
- None),
+ None, None),
}
AclClassMap = {
@@ -182,9 +188,17 @@ class Provider(object):
self.acl_class = self.AclClassMap[self.name]
self.canned_acls = self.CannedAclsMap[self.name]
self._credential_expiry_time = None
+
+ # Load shared credentials file if it exists
+ shared_path = os.path.join(expanduser('~'), '.' + name, 'credentials')
+ self.shared_credentials = Config(do_load=False)
+ if os.path.isfile(shared_path):
+ self.shared_credentials.load_from_path(shared_path)
+
self.get_credentials(access_key, secret_key, security_token, profile_name)
self.configure_headers()
self.configure_errors()
+
# Allow config file to override default host and port.
host_opt_name = '%s_host' % self.HostKeyMap[self.name]
if config.has_option('Credentials', host_opt_name):
@@ -247,16 +261,40 @@ class Provider(object):
def get_credentials(self, access_key=None, secret_key=None,
security_token=None, profile_name=None):
- access_key_name, secret_key_name, security_token_name = self.CredentialMap[self.name]
+ access_key_name, secret_key_name, security_token_name, \
+ profile_name_name = self.CredentialMap[self.name]
+
+ # Load profile from shared environment variable if it was not
+ # already passed in and the environment variable exists
+ if profile_name is None and profile_name_name is not None and \
+ profile_name_name.upper() in os.environ:
+ profile_name = os.environ[profile_name_name.upper()]
+
+ shared = self.shared_credentials
+
if access_key is not None:
self.access_key = access_key
boto.log.debug("Using access key provided by client.")
elif access_key_name.upper() in os.environ:
self.access_key = os.environ[access_key_name.upper()]
boto.log.debug("Using access key found in environment variable.")
- elif config.has_option("profile %s" % profile_name, access_key_name):
- self.access_key = config.get("profile %s" % profile_name, access_key_name)
- boto.log.debug("Using access key found in config file: profile %s." % profile_name)
+ elif profile_name is not None:
+ if shared.has_option(profile_name, access_key_name):
+ self.access_key = shared.get(profile_name, access_key_name)
+ boto.log.debug("Using access key found in shared credential "
+ "file for profile %s." % profile_name)
+ elif config.has_option("profile %s" % profile_name,
+ access_key_name):
+ self.access_key = config.get("profile %s" % profile_name,
+ access_key_name)
+ boto.log.debug("Using access key found in config file: "
+ "profile %s." % profile_name)
+ else:
+ raise ProfileNotFoundError('Profile "%s" not found!' %
+ profile_name)
+ elif shared.has_option('default', access_key_name):
+ self.access_key = shared.get('default', access_key_name)
+ boto.log.debug("Using access key found in shared credential file.")
elif config.has_option('Credentials', access_key_name):
self.access_key = config.get('Credentials', access_key_name)
boto.log.debug("Using access key found in config file.")
@@ -267,9 +305,22 @@ class Provider(object):
elif secret_key_name.upper() in os.environ:
self.secret_key = os.environ[secret_key_name.upper()]
boto.log.debug("Using secret key found in environment variable.")
- elif config.has_option("profile %s" % profile_name, secret_key_name):
- self.secret_key = config.get("profile %s" % profile_name, secret_key_name)
- boto.log.debug("Using secret key found in config file: profile %s." % profile_name)
+ elif profile_name is not None:
+ if shared.has_option(profile_name, secret_key_name):
+ self.secret_key = shared.get(profile_name, secret_key_name)
+ boto.log.debug("Using secret key found in shared credential "
+ "file for profile %s." % profile_name)
+ elif config.has_option("profile %s" % profile_name, secret_key_name):
+ self.secret_key = config.get("profile %s" % profile_name,
+ secret_key_name)
+ boto.log.debug("Using secret key found in config file: "
+ "profile %s." % profile_name)
+ else:
+ raise ProfileNotFoundError('Profile "%s" not found!' %
+ profile_name)
+ elif shared.has_option('default', secret_key_name):
+ self.secret_key = shared.get('default', secret_key_name)
+ boto.log.debug("Using secret key found in shared credential file.")
elif config.has_option('Credentials', secret_key_name):
self.secret_key = config.get('Credentials', secret_key_name)
boto.log.debug("Using secret key found in config file.")
@@ -299,6 +350,20 @@ class Provider(object):
self.security_token = os.environ[security_token_name.upper()]
boto.log.debug("Using security token found in environment"
" variable.")
+ elif shared.has_option(profile_name or 'default',
+ security_token_name):
+ self.security_token = shared.get(profile_name or 'default',
+ security_token_name)
+ boto.log.debug("Using security token found in shared "
+ "credential file.")
+ elif profile_name is not None:
+ if config.has_option("profile %s" % profile_name,
+ security_token_name):
+ boto.log.debug("config has option")
+ self.security_token = config.get("profile %s" % profile_name,
+ security_token_name)
+ boto.log.debug("Using security token found in config file: "
+ "profile %s." % profile_name)
elif config.has_option('Credentials', security_token_name):
self.security_token = config.get('Credentials',
security_token_name)
@@ -324,7 +389,7 @@ class Provider(object):
data='meta-data/iam/security-credentials/')
if metadata:
# I'm assuming there's only one role on the instance profile.
- security = metadata.values()[0]
+ security = list(metadata.values())[0]
self._access_key = security['AccessKeyId']
self._secret_key = self._convert_key_to_str(security['SecretAccessKey'])
self._security_token = security['Token']
@@ -335,7 +400,7 @@ class Provider(object):
self._credential_expiry_time - datetime.now(), expires_at)
def _convert_key_to_str(self, key):
- if isinstance(key, unicode):
+ if isinstance(key, six.text_type):
# the secret key must be bytes and not unicode to work
# properly with hmac.new (see http://bugs.python.org/issue5285)
return str(key)
diff --git a/awx/lib/site-packages/boto/pyami/bootstrap.py b/awx/lib/site-packages/boto/pyami/bootstrap.py
index f0b353de21..82c2822edd 100644
--- a/awx/lib/site-packages/boto/pyami/bootstrap.py
+++ b/awx/lib/site-packages/boto/pyami/bootstrap.py
@@ -82,7 +82,7 @@ class Bootstrap(ScriptBase):
try:
self.run('git pull', cwd=location)
num_remaining_attempts = 0
- except Exception, e:
+ except Exception as e:
boto.log.info('git pull attempt failed with the following exception. Trying again in a bit. %s', e)
time.sleep(2)
if update.find(':') >= 0:
diff --git a/awx/lib/site-packages/boto/pyami/config.py b/awx/lib/site-packages/boto/pyami/config.py
index 6669cc052e..37445f85c3 100644
--- a/awx/lib/site-packages/boto/pyami/config.py
+++ b/awx/lib/site-packages/boto/pyami/config.py
@@ -20,20 +20,14 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
-import StringIO, os, re
+import os
+import re
import warnings
-import ConfigParser
+
import boto
-# If running in Google App Engine there is no "user" and
-# os.path.expanduser() will fail. Attempt to detect this case and use a
-# no-op expanduser function in this case.
-try:
- os.path.expanduser('~')
- expanduser = os.path.expanduser
-except (AttributeError, ImportError):
- # This is probably running on App Engine.
- expanduser = (lambda x: x)
+from boto.compat import expanduser, ConfigParser, StringIO
+
# By default we use two locations for the boto configurations,
# /etc/boto.cfg and ~/.boto (which works on Windows and Unix).
@@ -55,13 +49,13 @@ elif 'BOTO_PATH' in os.environ:
BotoConfigLocations.append(expanduser(path))
-class Config(ConfigParser.SafeConfigParser):
+class Config(ConfigParser):
def __init__(self, path=None, fp=None, do_load=True):
# We don't use ``super`` here, because ``ConfigParser`` still uses
# old-style classes.
- ConfigParser.SafeConfigParser.__init__(self, {'working_dir' : '/mnt/pyami',
- 'debug' : '0'})
+ ConfigParser.__init__(self, {'working_dir': '/mnt/pyami',
+ 'debug': '0'})
if do_load:
if path:
self.load_from_path(path)
@@ -78,7 +72,7 @@ class Config(ConfigParser.SafeConfigParser):
def load_credential_file(self, path):
"""Load a credential file as is setup like the Java utilities"""
- c_data = StringIO.StringIO()
+ c_data = StringIO()
c_data.write("[Credentials]\n")
for line in open(path, "r").readlines():
c_data.write(line.replace("AWSAccessKeyId", "aws_access_key_id").replace("AWSSecretKey", "aws_secret_access_key"))
@@ -101,7 +95,7 @@ class Config(ConfigParser.SafeConfigParser):
Replace any previous value. If the path doesn't exist, create it.
Also add the option the the in-memory config.
"""
- config = ConfigParser.SafeConfigParser()
+ config = ConfigParser()
config.read(path)
if not config.has_section(section):
config.add_section(section)
@@ -145,21 +139,21 @@ class Config(ConfigParser.SafeConfigParser):
def get(self, section, name, default=None):
try:
- val = ConfigParser.SafeConfigParser.get(self, section, name)
+ val = ConfigParser.get(self, section, name)
except:
val = default
return val
def getint(self, section, name, default=0):
try:
- val = ConfigParser.SafeConfigParser.getint(self, section, name)
+ val = ConfigParser.getint(self, section, name)
except:
val = int(default)
return val
def getfloat(self, section, name, default=0.0):
try:
- val = ConfigParser.SafeConfigParser.getfloat(self, section, name)
+ val = ConfigParser.getfloat(self, section, name)
except:
val = float(default)
return val
@@ -182,13 +176,13 @@ class Config(ConfigParser.SafeConfigParser):
self.set(section, name, 'false')
def dump(self):
- s = StringIO.StringIO()
+ s = StringIO()
self.write(s)
- print s.getvalue()
+ print(s.getvalue())
def dump_safe(self, fp=None):
if not fp:
- fp = StringIO.StringIO()
+ fp = StringIO()
for section in self.sections():
fp.write('[%s]\n' % section)
for option in self.options(section):
diff --git a/awx/lib/site-packages/boto/pyami/copybot.py b/awx/lib/site-packages/boto/pyami/copybot.py
index 02d8bb22c8..09a6d444c5 100644
--- a/awx/lib/site-packages/boto/pyami/copybot.py
+++ b/awx/lib/site-packages/boto/pyami/copybot.py
@@ -94,4 +94,3 @@ class CopyBot(ScriptBase):
if boto.config.getbool(self.name, 'exit_on_completion', True):
ec2 = boto.connect_ec2()
ec2.terminate_instances([self.instance_id])
-
diff --git a/awx/lib/site-packages/boto/pyami/helloworld.py b/awx/lib/site-packages/boto/pyami/helloworld.py
index 680873ce17..b9b53b60c5 100644
--- a/awx/lib/site-packages/boto/pyami/helloworld.py
+++ b/awx/lib/site-packages/boto/pyami/helloworld.py
@@ -25,4 +25,3 @@ class HelloWorld(ScriptBase):
def main(self):
self.log('Hello World!!!')
-
diff --git a/awx/lib/site-packages/boto/pyami/installers/__init__.py b/awx/lib/site-packages/boto/pyami/installers/__init__.py
index cc689264bc..44abd0d24a 100644
--- a/awx/lib/site-packages/boto/pyami/installers/__init__.py
+++ b/awx/lib/site-packages/boto/pyami/installers/__init__.py
@@ -61,4 +61,3 @@ class Installer(ScriptBase):
Do whatever is necessary to "install" the package.
"""
raise NotImplementedError
-
diff --git a/awx/lib/site-packages/boto/pyami/installers/ubuntu/ebs.py b/awx/lib/site-packages/boto/pyami/installers/ubuntu/ebs.py
index 34d635fcc4..54a479859d 100644
--- a/awx/lib/site-packages/boto/pyami/installers/ubuntu/ebs.py
+++ b/awx/lib/site-packages/boto/pyami/installers/ubuntu/ebs.py
@@ -114,7 +114,7 @@ class EBSInstaller(Installer):
if self.logical_volume_name:
# if a logical volume was specified, override the specified volume_id
# (if there was one) with the current AWS volume for the logical volume:
- logical_volume = Volume.find(name = self.logical_volume_name).next()
+ logical_volume = next(Volume.find(name=self.logical_volume_name))
self.volume_id = logical_volume._volume_id
volume = ec2.get_all_volumes([self.volume_id])[0]
# wait for the volume to be available. The volume may still be being created
@@ -128,7 +128,7 @@ class EBSInstaller(Installer):
try:
ec2.attach_volume(self.volume_id, self.instance_id, self.device)
attempt_attach = False
- except EC2ResponseError, e:
+ except EC2ResponseError as e:
if e.error_code != 'IncorrectState':
# if there's an EC2ResonseError with the code set to IncorrectState, delay a bit for ec2
# to realize the instance is running, then try again. Otherwise, raise the error:
@@ -157,7 +157,7 @@ class EBSInstaller(Installer):
fp.close()
self.run('chmod +x /usr/local/bin/ebs_backup')
- def create_backup_cleanup_script(self, use_tag_based_cleanup = False):
+ def create_backup_cleanup_script(self, use_tag_based_cleanup=False):
fp = open('/usr/local/bin/ebs_backup_cleanup', 'w')
if use_tag_based_cleanup:
fp.write(TagBasedBackupCleanupScript)
@@ -225,7 +225,7 @@ class EBSInstaller(Installer):
# volume. Check for the presence of the new configuration flag, and use the appropriate
# cleanup method / script:
use_tag_based_cleanup = boto.config.has_option('EBS', 'use_tag_based_snapshot_cleanup')
- self.create_backup_cleanup_script(use_tag_based_cleanup);
+ self.create_backup_cleanup_script(use_tag_based_cleanup)
self.add_cron("ebs_backup_cleanup", "/usr/local/bin/ebs_backup_cleanup", minute=minute, hour=hour)
# Set up the fstab
diff --git a/awx/lib/site-packages/boto/pyami/installers/ubuntu/installer.py b/awx/lib/site-packages/boto/pyami/installers/ubuntu/installer.py
index 370d63fd7b..5a2abd908b 100644
--- a/awx/lib/site-packages/boto/pyami/installers/ubuntu/installer.py
+++ b/awx/lib/site-packages/boto/pyami/installers/ubuntu/installer.py
@@ -56,7 +56,7 @@ class Installer(boto.pyami.installers.Installer):
f = open(f_path, "w")
f.write(file)
f.close()
- os.chmod(f_path, stat.S_IREAD| stat.S_IWRITE | stat.S_IEXEC)
+ os.chmod(f_path, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
self.run("/usr/sbin/update-rc.d %s defaults" % name)
def add_env(self, key, value):
@@ -84,13 +84,11 @@ class Installer(boto.pyami.installers.Installer):
Create a user on the local system
"""
self.run("useradd -m %s" % user)
- usr = getpwnam(user)
+ usr = getpwnam(user)
return usr
-
def install(self):
"""
This is the only method you need to override
"""
raise NotImplementedError
-
diff --git a/awx/lib/site-packages/boto/pyami/installers/ubuntu/mysql.py b/awx/lib/site-packages/boto/pyami/installers/ubuntu/mysql.py
index 490e5dbb4f..5b0792ba9d 100644
--- a/awx/lib/site-packages/boto/pyami/installers/ubuntu/mysql.py
+++ b/awx/lib/site-packages/boto/pyami/installers/ubuntu/mysql.py
@@ -31,7 +31,7 @@ from boto.pyami.installers.ubuntu.installer import Installer
import os
import boto
from boto.utils import ShellCommand
-from ConfigParser import SafeConfigParser
+from boto.compat import ConfigParser
import time
ConfigSection = """
@@ -55,18 +55,18 @@ class MySQL(Installer):
def change_data_dir(self, password=None):
data_dir = boto.config.get('MySQL', 'data_dir', '/mnt')
- fresh_install = False;
+ fresh_install = False
is_mysql_running_command = ShellCommand('mysqladmin ping') # exit status 0 if mysql is running
is_mysql_running_command.run()
if is_mysql_running_command.getStatus() == 0:
- # mysql is running. This is the state apt-get will leave it in. If it isn't running,
+ # mysql is running. This is the state apt-get will leave it in. If it isn't running,
# that means mysql was already installed on the AMI and there's no need to stop it,
# saving 40 seconds on instance startup.
time.sleep(10) #trying to stop mysql immediately after installing it fails
# We need to wait until mysql creates the root account before we kill it
# or bad things will happen
i = 0
- while self.run("echo 'quit' | mysql -u root") != 0 and i<5:
+ while self.run("echo 'quit' | mysql -u root") != 0 and i < 5:
time.sleep(5)
i = i + 1
self.run('/etc/init.d/mysql stop')
@@ -75,7 +75,7 @@ class MySQL(Installer):
mysql_path = os.path.join(data_dir, 'mysql')
if not os.path.exists(mysql_path):
self.run('mkdir %s' % mysql_path)
- fresh_install = True;
+ fresh_install = True
self.run('chown -R mysql:mysql %s' % mysql_path)
fp = open('/etc/mysql/conf.d/use_mnt.cnf', 'w')
fp.write('# created by pyami\n')
@@ -89,7 +89,7 @@ class MySQL(Installer):
self.start('mysql')
else:
#get the password ubuntu expects to use:
- config_parser = SafeConfigParser()
+ config_parser = ConfigParser()
config_parser.read('/etc/mysql/debian.cnf')
password = config_parser.get('client', 'password')
# start the mysql deamon, then mysql with the required grant statement piped into it:
@@ -106,4 +106,3 @@ class MySQL(Installer):
# and changing that is too ugly to be worth it:
#self.set_root_password()
self.change_data_dir()
-
diff --git a/awx/lib/site-packages/boto/pyami/installers/ubuntu/trac.py b/awx/lib/site-packages/boto/pyami/installers/ubuntu/trac.py
index ef83af7aac..8c51c8f720 100644
--- a/awx/lib/site-packages/boto/pyami/installers/ubuntu/trac.py
+++ b/awx/lib/site-packages/boto/pyami/installers/ubuntu/trac.py
@@ -52,7 +52,7 @@ class Trac(Installer):
self.run("a2enmod mod_python")
self.run("a2enmod dav_svn")
self.run("a2enmod rewrite")
- # Make sure that boto.log is writable by everyone so that subversion post-commit hooks can
+ # Make sure that boto.log is writable by everyone so that subversion post-commit hooks can
# write to it.
self.run("touch /var/log/boto.log")
self.run("chmod a+w /var/log/boto.log")
diff --git a/awx/lib/site-packages/boto/pyami/launch_ami.py b/awx/lib/site-packages/boto/pyami/launch_ami.py
index 243d56d2eb..9037217b61 100644
--- a/awx/lib/site-packages/boto/pyami/launch_ami.py
+++ b/awx/lib/site-packages/boto/pyami/launch_ami.py
@@ -29,7 +29,7 @@ import boto
usage_string = """
SYNOPSIS
launch_ami.py -a ami_id [-b script_bucket] [-s script_name]
- [-m module] [-c class_name] [-r]
+ [-m module] [-c class_name] [-r]
[-g group] [-k key_name] [-n num_instances]
[-w] [extra_data]
Where:
@@ -68,7 +68,7 @@ SYNOPSIS
"""
def usage():
- print usage_string
+ print(usage_string)
sys.exit()
def main():
@@ -80,16 +80,16 @@ def main():
'reload', 'script_name', 'wait'])
except:
usage()
- params = {'module_name' : None,
- 'script_name' : None,
- 'class_name' : None,
- 'script_bucket' : None,
- 'group' : 'default',
- 'keypair' : None,
- 'ami' : None,
- 'num_instances' : 1,
- 'input_queue_name' : None,
- 'output_queue_name' : None}
+ params = {'module_name': None,
+ 'script_name': None,
+ 'class_name': None,
+ 'script_bucket': None,
+ 'group': 'default',
+ 'keypair': None,
+ 'ami': None,
+ 'num_instances': 1,
+ 'input_queue_name': None,
+ 'output_queue_name': None}
reload = None
wait = None
for o, a in opts:
@@ -124,18 +124,18 @@ def main():
required = ['ami']
for pname in required:
if not params.get(pname, None):
- print '%s is required' % pname
+ print('%s is required' % pname)
usage()
if params['script_name']:
# first copy the desired module file to S3 bucket
if reload:
- print 'Reloading module %s to S3' % params['script_name']
+ print('Reloading module %s to S3' % params['script_name'])
else:
- print 'Copying module %s to S3' % params['script_name']
+ print('Copying module %s to S3' % params['script_name'])
l = imp.find_module(params['script_name'])
c = boto.connect_s3()
bucket = c.get_bucket(params['script_bucket'])
- key = bucket.new_key(params['script_name']+'.py')
+ key = bucket.new_key(params['script_name'] + '.py')
key.set_contents_from_file(l[0])
params['script_md5'] = key.md5
# we have everything we need, now build userdata string
@@ -155,24 +155,23 @@ def main():
r = img.run(user_data=s, key_name=params['keypair'],
security_groups=[params['group']],
max_count=params.get('num_instances', 1))
- print 'AMI: %s - %s (Started)' % (params['ami'], img.location)
- print 'Reservation %s contains the following instances:' % r.id
+ print('AMI: %s - %s (Started)' % (params['ami'], img.location))
+ print('Reservation %s contains the following instances:' % r.id)
for i in r.instances:
- print '\t%s' % i.id
+ print('\t%s' % i.id)
if wait:
running = False
while not running:
time.sleep(30)
[i.update() for i in r.instances]
status = [i.state for i in r.instances]
- print status
+ print(status)
if status.count('running') == len(r.instances):
running = True
for i in r.instances:
- print 'Instance: %s' % i.ami_launch_index
- print 'Public DNS Name: %s' % i.public_dns_name
- print 'Private DNS Name: %s' % i.private_dns_name
+ print('Instance: %s' % i.ami_launch_index)
+ print('Public DNS Name: %s' % i.public_dns_name)
+ print('Private DNS Name: %s' % i.private_dns_name)
if __name__ == "__main__":
main()
-
diff --git a/awx/lib/site-packages/boto/pyami/scriptbase.py b/awx/lib/site-packages/boto/pyami/scriptbase.py
index 8e8cb0c0fb..d99a2b46e0 100644
--- a/awx/lib/site-packages/boto/pyami/scriptbase.py
+++ b/awx/lib/site-packages/boto/pyami/scriptbase.py
@@ -32,13 +32,12 @@ class ScriptBase(object):
if self.last_command.status != 0:
boto.log.error('Error running command: "%s". Output: "%s"' % (command, self.last_command.output))
if notify:
- self.notify('Error encountered', \
- 'Error running the following command:\n\t%s\n\nCommand output:\n\t%s' % \
- (command, self.last_command.output))
+ self.notify('Error encountered',
+ 'Error running the following command:\n\t%s\n\nCommand output:\n\t%s' % \
+ (command, self.last_command.output))
if exit_on_error:
sys.exit(-1)
return self.last_command.status
def main(self):
pass
-
diff --git a/awx/lib/site-packages/boto/pyami/startup.py b/awx/lib/site-packages/boto/pyami/startup.py
index 2093151a7b..4bd9dadd89 100644
--- a/awx/lib/site-packages/boto/pyami/startup.py
+++ b/awx/lib/site-packages/boto/pyami/startup.py
@@ -37,14 +37,14 @@ class Startup(ScriptBase):
pos = script.rfind('.')
if pos > 0:
mod_name = script[0:pos]
- cls_name = script[pos+1:]
+ cls_name = script[pos + 1:]
cls = find_class(mod_name, cls_name)
boto.log.info('Running Script: %s' % script)
s = cls()
s.main()
else:
boto.log.warning('Trouble parsing script: %s' % script)
- except Exception, e:
+ except Exception as e:
boto.log.exception('Problem Running Script: %s. Startup process halting.' % script)
raise e
diff --git a/awx/lib/site-packages/boto/rds2/layer1.py b/awx/lib/site-packages/boto/rds2/layer1.py
index 1e2ba53793..5615f1107d 100644
--- a/awx/lib/site-packages/boto/rds2/layer1.py
+++ b/awx/lib/site-packages/boto/rds2/layer1.py
@@ -20,16 +20,12 @@
# IN THE SOFTWARE.
#
-try:
- import json
-except ImportError:
- import simplejson as json
-
import boto
from boto.connection import AWSQueryConnection
from boto.regioninfo import RegionInfo
from boto.exception import JSONResponseError
from boto.rds2 import exceptions
+from boto.compat import json
class RDSConnection(AWSQueryConnection):
@@ -1011,7 +1007,7 @@ class RDSConnection(AWSQueryConnection):
:param subnet_ids: The EC2 Subnet IDs for the DB subnet group.
:type tags: list
- :param tags: A list of tags.
+ :param tags: A list of tags, each supplied as a tuple.
"""
params = {
diff --git a/awx/lib/site-packages/boto/redshift/layer1.py b/awx/lib/site-packages/boto/redshift/layer1.py
index 02aaff1588..2317f5d2d8 100644
--- a/awx/lib/site-packages/boto/redshift/layer1.py
+++ b/awx/lib/site-packages/boto/redshift/layer1.py
@@ -2930,7 +2930,7 @@ class RedshiftConnection(AWSQueryConnection):
params['ContentType'] = 'JSON'
response = self.make_request(action=action, verb='POST',
path='/', params=params)
- body = response.read()
+ body = response.read().decode('utf-8')
boto.log.debug(body)
if response.status == 200:
return json.loads(body)
diff --git a/awx/lib/site-packages/boto/regioninfo.py b/awx/lib/site-packages/boto/regioninfo.py
index 29ebb1e30b..5862f16d6a 100644
--- a/awx/lib/site-packages/boto/regioninfo.py
+++ b/awx/lib/site-packages/boto/regioninfo.py
@@ -20,7 +20,6 @@
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-from __future__ import with_statement
import os
import boto
@@ -87,8 +86,8 @@ def load_regions():
# Try the ENV var. If not, check the config file.
if os.environ.get('BOTO_ENDPOINTS'):
additional_path = os.environ['BOTO_ENDPOINTS']
- elif boto.config.get('boto', 'endpoints_path'):
- additional_path = boto.config.get('boto', 'endpoints_path')
+ elif boto.config.get('Boto', 'endpoints_path'):
+ additional_path = boto.config.get('Boto', 'endpoints_path')
# If there's a file provided, we'll load it & additively merge it into
# the endpoints.
diff --git a/awx/lib/site-packages/boto/roboto/awsqueryrequest.py b/awx/lib/site-packages/boto/roboto/awsqueryrequest.py
index 1e15c4f872..793adf90ec 100644
--- a/awx/lib/site-packages/boto/roboto/awsqueryrequest.py
+++ b/awx/lib/site-packages/boto/roboto/awsqueryrequest.py
@@ -19,7 +19,6 @@
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-
import sys
import os
import boto
@@ -47,10 +46,10 @@ def boto_except_hook(debugger_flag, debug_flag):
else:
debugger.post_mortem(tb)
elif debug_flag:
- print traceback.print_tb(tb)
+ print(traceback.print_tb(tb))
sys.exit(1)
else:
- print value
+ print(value)
sys.exit(1)
return excepthook
@@ -69,7 +68,7 @@ class Line(object):
def print_it(self):
if not self.printed:
- print self.line
+ print(self.line)
self.printed = True
class RequiredParamError(boto.exception.BotoClientError):
@@ -342,9 +341,9 @@ class AWSQueryRequest(object):
def process_standard_options(self, options, args, d):
if hasattr(options, 'help_filters') and options.help_filters:
- print 'Available filters:'
+ print('Available filters:')
for filter in self.Filters:
- print '%s\t%s' % (filter.name, filter.doc)
+ print('%s\t%s' % (filter.name, filter.doc))
sys.exit(0)
if options.debug:
self.args['debug'] = 2
@@ -358,7 +357,7 @@ class AWSQueryRequest(object):
self.args['aws_secret_access_key'] = options.secret_key
if options.version:
# TODO - Where should the version # come from?
- print 'version x.xx'
+ print('version x.xx')
exit(0)
sys.excepthook = boto_except_hook(options.debugger,
options.debug)
@@ -452,17 +451,17 @@ class AWSQueryRequest(object):
try:
response = self.main()
self.cli_formatter(response)
- except RequiredParamError, e:
- print e
+ except RequiredParamError as e:
+ print(e)
sys.exit(1)
- except self.ServiceClass.ResponseError, err:
- print 'Error(%s): %s' % (err.error_code, err.error_message)
+ except self.ServiceClass.ResponseError as err:
+ print('Error(%s): %s' % (err.error_code, err.error_message))
sys.exit(1)
- except boto.roboto.awsqueryservice.NoCredentialsError, err:
- print 'Unable to find credentials.'
+ except boto.roboto.awsqueryservice.NoCredentialsError as err:
+ print('Unable to find credentials.')
sys.exit(1)
- except Exception, e:
- print e
+ except Exception as e:
+ print(e)
sys.exit(1)
def _generic_cli_formatter(self, fmt, data, label=''):
diff --git a/awx/lib/site-packages/boto/roboto/awsqueryservice.py b/awx/lib/site-packages/boto/roboto/awsqueryservice.py
index cb3a21d085..9bf95ac2be 100644
--- a/awx/lib/site-packages/boto/roboto/awsqueryservice.py
+++ b/awx/lib/site-packages/boto/roboto/awsqueryservice.py
@@ -1,10 +1,11 @@
+from __future__ import print_function
import os
import urlparse
import boto
import boto.connection
import boto.jsonresponse
import boto.exception
-import awsqueryrequest
+from boto.roboto import awsqueryrequest
class NoCredentialsError(boto.exception.BotoClientError):
@@ -48,7 +49,7 @@ class AWSQueryService(boto.connection.AWSQueryConnection):
def check_for_credential_file(self):
"""
- Checks for the existance of an AWS credential file.
+ Checks for the existence of an AWS credential file.
If the environment variable AWS_CREDENTIAL_FILE is
set and points to a file, that file will be read and
will be searched credentials.
@@ -77,7 +78,7 @@ class AWSQueryService(boto.connection.AWSQueryConnection):
value = value.strip()
self.args['aws_secret_access_key'] = value
else:
- print 'Warning: unable to read AWS_CREDENTIAL_FILE'
+ print('Warning: unable to read AWS_CREDENTIAL_FILE')
def check_for_env_url(self):
"""
diff --git a/awx/lib/site-packages/boto/roboto/param.py b/awx/lib/site-packages/boto/roboto/param.py
index ed3e6be9b9..35a25b4af5 100644
--- a/awx/lib/site-packages/boto/roboto/param.py
+++ b/awx/lib/site-packages/boto/roboto/param.py
@@ -46,7 +46,7 @@ class Converter(object):
@classmethod
def convert_file(cls, param, value):
- if os.path.isfile(value):
+ if os.path.exists(value) and not os.path.isdir(value):
return value
raise ValueError
diff --git a/awx/lib/site-packages/boto/route53/connection.py b/awx/lib/site-packages/boto/route53/connection.py
index 7f45c778ce..2cab2359f8 100644
--- a/awx/lib/site-packages/boto/route53/connection.py
+++ b/awx/lib/site-packages/boto/route53/connection.py
@@ -24,9 +24,8 @@
# IN THE SOFTWARE.
#
-import exception
+from boto.route53 import exception
import random
-import urllib
import uuid
import xml.sax
@@ -36,6 +35,7 @@ from boto import handler
import boto.jsonresponse
from boto.route53.record import ResourceRecordSets
from boto.route53.zone import Zone
+from boto.compat import six, urllib
HZXML = """
@@ -79,10 +79,10 @@ class Route53Connection(AWSAuthConnection):
def make_request(self, action, path, headers=None, data='', params=None):
if params:
pairs = []
- for key, val in params.iteritems():
+ for key, val in six.iteritems(params):
if val is None:
continue
- pairs.append(key + '=' + urllib.quote(str(val)))
+ pairs.append(key + '=' + urllib.parse.quote(str(val)))
path += '?' + '&'.join(pairs)
return super(Route53Connection, self).make_request(action, path,
headers, data,
@@ -213,6 +213,13 @@ class Route53Connection(AWSAuthConnection):
body)
def delete_hosted_zone(self, hosted_zone_id):
+ """
+ Delete the hosted zone specified by the given id.
+
+ :type hosted_zone_id: str
+ :param hosted_zone_id: The hosted zone's id
+
+ """
uri = '/%s/hostedzone/%s' % (self.Version, hosted_zone_id)
response = self.make_request('DELETE', uri)
body = response.read()
@@ -480,6 +487,10 @@ class Route53Connection(AWSAuthConnection):
"""
Returns a list of Zone objects, one for each of the Hosted
Zones defined for the AWS account.
+
+ :rtype: list
+ :returns: A list of Zone objects.
+
"""
zones = self.get_all_hosted_zones()
return [Zone(self, zone) for zone in
@@ -519,7 +530,8 @@ class Route53Connection(AWSAuthConnection):
'PriorRequestNotComplete',
i
)
- next_sleep = random.random() * (2 ** i)
+ next_sleep = min(random.random() * (2 ** i),
+ boto.config.get('Boto', 'max_retry_delay', 60))
i += 1
status = (msg, i, next_sleep)
diff --git a/awx/lib/site-packages/boto/route53/healthcheck.py b/awx/lib/site-packages/boto/route53/healthcheck.py
index 059d208b4b..85d6919b68 100644
--- a/awx/lib/site-packages/boto/route53/healthcheck.py
+++ b/awx/lib/site-packages/boto/route53/healthcheck.py
@@ -40,8 +40,11 @@ POST /2013-04-01/healthcheck HTTP/1.1
if Type is HTTP_STR_MATCH or HTTPS_STR_MATCH,
the string to search for in the response body
from the specified resource
+            <RequestInterval>10 | 30</RequestInterval>
+            <FailureThreshold>integer between 1 and 10</FailureThreshold>
-
+"""
class HealthCheck(object):
@@ -50,16 +53,19 @@ class HealthCheck(object):
POSTXMLBody = """
-        <IPAddress>%(ip_addr)s</IPAddress>
+        %(ip_addr_part)s
         <Port>%(port)s</Port>
         <Type>%(type)s</Type>
         <ResourcePath>%(resource_path)s</ResourcePath>
         %(fqdn_part)s
         %(string_match_part)s
         %(request_interval)s
+        <FailureThreshold>%(failure_threshold)s</FailureThreshold>
"""
+    XMLIpAddrPart = """<IPAddress>%(ip_addr)s</IPAddress>"""
+
     XMLFQDNPart = """<FullyQualifiedDomainName>%(fqdn)s</FullyQualifiedDomainName>"""
     XMLStringMatchPart = """<SearchString>%(string_match)s</SearchString>"""
@@ -68,18 +74,18 @@ class HealthCheck(object):
valid_request_intervals = (10, 30)
- def __init__(self, ip_addr, port, hc_type, resource_path, fqdn=None, string_match=None, request_interval=30):
+ def __init__(self, ip_addr, port, hc_type, resource_path, fqdn=None, string_match=None, request_interval=30, failure_threshold=3):
"""
HealthCheck object
:type ip_addr: str
- :param ip_addr: IP Address
+ :param ip_addr: Optional IP Address
:type port: int
:param port: Port to check
:type hc_type: str
- :param ip_addr: One of HTTP | HTTPS | HTTP_STR_MATCH | HTTPS_STR_MATCH | TCP
+ :param hc_type: One of HTTP | HTTPS | HTTP_STR_MATCH | HTTPS_STR_MATCH | TCP
:type resource_path: str
:param resource_path: Path to check
@@ -93,6 +99,9 @@ class HealthCheck(object):
:type request_interval: int
:param request_interval: The number of seconds between the time that Amazon Route 53 gets a response from your endpoint and the time that it sends the next health-check request.
+ :type failure_threshold: int
+ :param failure_threshold: The number of consecutive health checks that an endpoint must pass or fail for Amazon Route 53 to change the current status of the endpoint from unhealthy to healthy or vice versa.
+
"""
self.ip_addr = ip_addr
self.port = port
@@ -100,6 +109,7 @@ class HealthCheck(object):
self.resource_path = resource_path
self.fqdn = fqdn
self.string_match = string_match
+ self.failure_threshold = failure_threshold
if request_interval in self.valid_request_intervals:
self.request_interval = request_interval
@@ -108,9 +118,13 @@ class HealthCheck(object):
"Valid values for request_interval are: %s" %
",".join(str(i) for i in self.valid_request_intervals))
+ if failure_threshold < 1 or failure_threshold > 10:
+ raise AttributeError(
+ 'Valid values for failure_threshold are 1 - 10.')
+
def to_xml(self):
params = {
- 'ip_addr': self.ip_addr,
+ 'ip_addr_part': '',
'port': self.port,
'type': self.hc_type,
'resource_path': self.resource_path,
@@ -118,10 +132,14 @@ class HealthCheck(object):
'string_match_part': "",
'request_interval': (self.XMLRequestIntervalPart %
{'request_interval': self.request_interval}),
+ 'failure_threshold': self.failure_threshold,
}
if self.fqdn is not None:
params['fqdn_part'] = self.XMLFQDNPart % {'fqdn': self.fqdn}
+ if self.ip_addr:
+ params['ip_addr_part'] = self.XMLIpAddrPart % {'ip_addr': self.ip_addr}
+
if self.string_match is not None:
params['string_match_part'] = self.XMLStringMatchPart % {'string_match' : self.string_match}
diff --git a/awx/lib/site-packages/boto/route53/record.py b/awx/lib/site-packages/boto/route53/record.py
index e04e009e27..664739b855 100644
--- a/awx/lib/site-packages/boto/route53/record.py
+++ b/awx/lib/site-packages/boto/route53/record.py
@@ -67,7 +67,7 @@ class ResourceRecordSets(ResultSet):
def add_change(self, action, name, type, ttl=600,
alias_hosted_zone_id=None, alias_dns_name=None, identifier=None,
weight=None, region=None, alias_evaluate_target_health=None,
- health_check=None):
+ health_check=None, failover=None):
"""
Add a change request to the set.
@@ -121,20 +121,24 @@ class ResourceRecordSets(ResultSet):
for the latency-based routing
:type alias_evaluate_target_health: Boolean
- :param region: *Required for alias resource record sets* Indicates
- whether this Resource Record Set should respect the health status of
- any health checks associated with the ALIAS target record which it is
- linked to.
+ :param alias_evaluate_target_health: *Required for alias resource record sets* Indicates
+ whether this Resource Record Set should respect the health status of
+ any health checks associated with the ALIAS target record which it is
+ linked to.
:type health_check: str
:param health_check: Health check to associate with this record
+
+ :type failover: str
+ :param failover: *Failover resource record sets only* Whether this is the
+ primary or secondary resource record set.
"""
change = Record(name, type, ttl,
alias_hosted_zone_id=alias_hosted_zone_id,
alias_dns_name=alias_dns_name, identifier=identifier,
weight=weight, region=region,
alias_evaluate_target_health=alias_evaluate_target_health,
- health_check=health_check)
+ health_check=health_check, failover=failover)
self.changes.append([action, change])
return change
@@ -210,6 +214,11 @@ class Record(object):
%(region)s
"""
+ FailoverBody = """
+        <SetIdentifier>%(identifier)s</SetIdentifier>
+        <Failover>%(failover)s</Failover>
+ """
+
ResourceRecordsBody = """
         <TTL>%(ttl)s</TTL>
@@ -232,7 +241,7 @@ class Record(object):
def __init__(self, name=None, type=None, ttl=600, resource_records=None,
alias_hosted_zone_id=None, alias_dns_name=None, identifier=None,
weight=None, region=None, alias_evaluate_target_health=None,
- health_check=None):
+ health_check=None, failover=None):
self.name = name
self.type = type
self.ttl = ttl
@@ -246,6 +255,7 @@ class Record(object):
self.region = region
self.alias_evaluate_target_health = alias_evaluate_target_health
self.health_check = health_check
+ self.failover = failover
def __repr__(self):
         return '<Record:%s:%s:%s>' % (self.name, self.type, self.to_print())
@@ -293,6 +303,9 @@ class Record(object):
elif self.identifier is not None and self.region is not None:
weight = self.RRRBody % {"identifier": self.identifier, "region":
self.region}
+ elif self.identifier is not None and self.failover is not None:
+ weight = self.FailoverBody % {"identifier": self.identifier, "failover":
+ self.failover}
health_check = ""
if self.health_check is not None:
@@ -322,6 +335,8 @@ class Record(object):
rr += ' (WRR id=%s, w=%s)' % (self.identifier, self.weight)
elif self.identifier is not None and self.region is not None:
rr += ' (LBR id=%s, region=%s)' % (self.identifier, self.region)
+ elif self.identifier is not None and self.failover is not None:
+ rr += ' (FAILOVER id=%s, failover=%s)' % (self.identifier, self.failover)
return rr
@@ -341,11 +356,15 @@ class Record(object):
elif name == 'SetIdentifier':
self.identifier = value
elif name == 'EvaluateTargetHealth':
- self.alias_evaluate_target_health = value
+ self.alias_evaluate_target_health = value.lower() == 'true'
elif name == 'Weight':
self.weight = value
elif name == 'Region':
self.region = value
+ elif name == 'Failover':
+ self.failover = value
+ elif name == 'HealthCheckId':
+ self.health_check = value
def startElement(self, name, attrs, connection):
return None
diff --git a/awx/lib/site-packages/boto/s3/__init__.py b/awx/lib/site-packages/boto/s3/__init__.py
index 271c104752..67d53e3bd8 100644
--- a/awx/lib/site-packages/boto/s3/__init__.py
+++ b/awx/lib/site-packages/boto/s3/__init__.py
@@ -49,7 +49,7 @@ def regions():
:rtype: list
:return: A list of :class:`boto.regioninfo.RegionInfo`
"""
- from .connection import S3Connection
+ from boto.s3.connection import S3Connection
return get_regions(
's3',
region_cls=S3RegionInfo,
diff --git a/awx/lib/site-packages/boto/s3/acl.py b/awx/lib/site-packages/boto/s3/acl.py
index c54ddc6262..51613883e4 100644
--- a/awx/lib/site-packages/boto/s3/acl.py
+++ b/awx/lib/site-packages/boto/s3/acl.py
@@ -32,6 +32,7 @@ class Policy(object):
def __init__(self, parent=None):
self.parent = parent
+ self.namespace = None
self.acl = None
def __repr__(self):
@@ -50,6 +51,9 @@ class Policy(object):
         return "<Policy: %s>" % ", ".join(grants)
def startElement(self, name, attrs, connection):
+ if name == 'AccessControlPolicy':
+ self.namespace = attrs.get('xmlns', None)
+ return None
if name == 'Owner':
self.owner = User(self)
return self.owner
@@ -68,7 +72,10 @@ class Policy(object):
setattr(self, name, value)
def to_xml(self):
-        s = '<AccessControlPolicy>'
+        if self.namespace is not None:
+            s = '<AccessControlPolicy xmlns="{0}">'.format(self.namespace)
+        else:
+            s = '<AccessControlPolicy>'
s += self.owner.to_xml()
s += self.acl.to_xml()
         s += '</AccessControlPolicy>'
diff --git a/awx/lib/site-packages/boto/s3/bucket.py b/awx/lib/site-packages/boto/s3/bucket.py
index ed40970388..355716b4fc 100644
--- a/awx/lib/site-packages/boto/s3/bucket.py
+++ b/awx/lib/site-packages/boto/s3/bucket.py
@@ -45,11 +45,10 @@ import boto.jsonresponse
import boto.utils
import xml.sax
import xml.sax.saxutils
-import StringIO
-import urllib
import re
import base64
from collections import defaultdict
+from boto.compat import BytesIO, six, StringIO, urllib
# as per http://goo.gl/BDuud (02/19/2011)
@@ -146,7 +145,7 @@ class Bucket(object):
response_headers=None, validate=True):
"""
Check to see if a particular key exists within the bucket. This
- method uses a HEAD request to check for the existance of the key.
+ method uses a HEAD request to check for the existence of the key.
Returns: An instance of a Key object or None
:param key_name: The name of the key to retrieve
@@ -187,8 +186,8 @@ class Bucket(object):
if version_id:
query_args_l.append('versionId=%s' % version_id)
if response_headers:
- for rk, rv in response_headers.iteritems():
- query_args_l.append('%s=%s' % (rk, urllib.quote(rv)))
+ for rk, rv in six.iteritems(response_headers):
+ query_args_l.append('%s=%s' % (rk, urllib.parse.quote(rv)))
key, resp = self._get_key_internal(key_name, headers, query_args_l)
return key
@@ -374,17 +373,19 @@ class Bucket(object):
if initial_query_string:
pairs.append(initial_query_string)
- for key, value in params.items():
+ for key, value in sorted(params.items(), key=lambda x: x[0]):
+ if value is None:
+ continue
key = key.replace('_', '-')
if key == 'maxkeys':
key = 'max-keys'
- if isinstance(value, unicode):
- value = value.encode('utf-8')
- if value is not None and value != '':
- pairs.append('%s=%s' % (
- urllib.quote(key),
- urllib.quote(str(value)
- )))
+ if not isinstance(value, six.string_types + (six.binary_type,)):
+ value = six.text_type(value)
+ if value != '':
+ pairs.append(u'%s=%s' % (
+ urllib.parse.quote(key),
+ urllib.parse.quote(value)
+ ))
return '&'.join(pairs)
@@ -402,6 +403,8 @@ class Bucket(object):
if response.status == 200:
rs = ResultSet(element_map)
h = handler.XmlHandler(rs, self)
+ if not isinstance(body, bytes):
+ body = body.encode('utf-8')
xml.sax.parseString(body, h)
return rs
else:
@@ -670,10 +673,10 @@ class Bucket(object):
count = 0
while count < 1000:
try:
- key = ikeys.next()
+ key = next(ikeys)
except StopIteration:
break
- if isinstance(key, basestring):
+ if isinstance(key, six.string_types):
key_name = key
version_id = None
elif isinstance(key, tuple) and len(key) == 2:
@@ -701,7 +704,7 @@ class Bucket(object):
if count <= 0:
return False # no more
data = data.encode('utf-8')
- fp = StringIO.StringIO(data)
+ fp = BytesIO(data)
md5 = boto.utils.compute_md5(fp)
hdrs['Content-MD5'] = md5[1]
hdrs['Content-Type'] = 'text/xml'
@@ -714,6 +717,8 @@ class Bucket(object):
body = response.read()
if response.status == 200:
h = handler.XmlHandler(result, self)
+ if not isinstance(body, bytes):
+ body = body.encode('utf-8')
xml.sax.parseString(body, h)
return count >= 1000 # more?
else:
@@ -851,7 +856,7 @@ class Bucket(object):
acl = src_bucket.get_xml_acl(src_key_name)
if encrypt_key:
headers[provider.server_side_encryption_header] = 'AES256'
- src = '%s/%s' % (src_bucket_name, urllib.quote(src_key_name))
+ src = '%s/%s' % (src_bucket_name, urllib.parse.quote(src_key_name))
if src_version_id:
src += '?versionId=%s' % src_version_id
headers[provider.copy_source_header] = str(src)
@@ -870,6 +875,8 @@ class Bucket(object):
if response.status == 200:
key = self.new_key(new_key_name)
h = handler.XmlHandler(key, self)
+ if not isinstance(body, bytes):
+ body = body.encode('utf-8')
xml.sax.parseString(body, h)
if hasattr(key, 'Error'):
raise provider.storage_copy_error(key.Code, key.Message, body)
@@ -918,8 +925,10 @@ class Bucket(object):
query_args='acl'):
if version_id:
query_args += '&versionId=%s' % version_id
+ if not isinstance(acl_str, bytes):
+ acl_str = acl_str.encode('utf-8')
response = self.connection.make_request('PUT', self.name, key_name,
- data=acl_str.encode('UTF-8'),
+ data=acl_str,
query_args=query_args,
headers=headers)
body = response.read()
@@ -946,6 +955,8 @@ class Bucket(object):
if response.status == 200:
policy = Policy(self)
h = handler.XmlHandler(policy, self)
+ if not isinstance(body, bytes):
+ body = body.encode('utf-8')
xml.sax.parseString(body, h)
return policy
else:
@@ -980,8 +991,10 @@ class Bucket(object):
query_args = subresource
if version_id:
query_args += '&versionId=%s' % version_id
+ if not isinstance(value, bytes):
+ value = value.encode('utf-8')
response = self.connection.make_request('PUT', self.name, key_name,
- data=value.encode('UTF-8'),
+ data=value,
query_args=query_args,
headers=headers)
body = response.read()
@@ -1124,6 +1137,8 @@ class Bucket(object):
if response.status == 200:
rs = ResultSet(self)
h = handler.XmlHandler(rs, self)
+ if not isinstance(body, bytes):
+ body = body.encode('utf-8')
xml.sax.parseString(body, h)
return rs.LocationConstraint
else:
@@ -1143,7 +1158,9 @@ class Bucket(object):
:rtype: bool
:return: True if ok or raises an exception.
"""
- body = logging_str.encode('utf-8')
+ body = logging_str
+ if not isinstance(body, bytes):
+ body = body.encode('utf-8')
response = self.connection.make_request('PUT', self.name, data=body,
query_args='logging', headers=headers)
body = response.read()
@@ -1201,6 +1218,8 @@ class Bucket(object):
if response.status == 200:
blogging = BucketLogging()
h = handler.XmlHandler(blogging, self)
+ if not isinstance(body, bytes):
+ body = body.encode('utf-8')
xml.sax.parseString(body, h)
return blogging
else:
@@ -1303,6 +1322,8 @@ class Bucket(object):
response = self.connection.make_request('GET', self.name,
query_args='versioning', headers=headers)
body = response.read()
+ if not isinstance(body, six.string_types):
+ body = body.decode('utf-8')
boto.log.debug(body)
if response.status == 200:
d = {}
@@ -1326,8 +1347,8 @@ class Bucket(object):
to configure for this bucket.
"""
xml = lifecycle_config.to_xml()
- xml = xml.encode('utf-8')
- fp = StringIO.StringIO(xml)
+ #xml = xml.encode('utf-8')
+ fp = StringIO(xml)
md5 = boto.utils.compute_md5(fp)
if headers is None:
headers = {}
@@ -1359,6 +1380,8 @@ class Bucket(object):
if response.status == 200:
lifecycle = Lifecycle()
h = handler.XmlHandler(lifecycle, self)
+ if not isinstance(body, bytes):
+ body = body.encode('utf-8')
xml.sax.parseString(body, h)
return lifecycle
else:
@@ -1505,7 +1528,7 @@ class Bucket(object):
"""Get raw website configuration xml"""
response = self.connection.make_request('GET', self.name,
query_args='website', headers=headers)
- body = response.read()
+ body = response.read().decode('utf-8')
boto.log.debug(body)
if response.status != 200:
@@ -1591,7 +1614,7 @@ class Bucket(object):
CORS configuration. See the S3 documentation for details
of the exact syntax required.
"""
- fp = StringIO.StringIO(cors_xml)
+ fp = StringIO(cors_xml)
md5 = boto.utils.compute_md5(fp)
if headers is None:
headers = {}
@@ -1735,6 +1758,8 @@ class Bucket(object):
if response.status == 200:
resp = MultiPartUpload(self)
h = handler.XmlHandler(resp, self)
+ if not isinstance(body, bytes):
+ body = body.encode('utf-8')
xml.sax.parseString(body, h)
return resp
else:
@@ -1754,7 +1779,7 @@ class Bucket(object):
query_args=query_args,
headers=headers, data=xml_body)
contains_error = False
- body = response.read()
+ body = response.read().decode('utf-8')
# Some errors will be reported in the body of the response
# even though the HTTP response code is 200. This check
# does a quick and dirty peek in the body for an error element.
@@ -1764,6 +1789,8 @@ class Bucket(object):
if response.status == 200 and not contains_error:
resp = CompleteMultiPartUpload(self)
h = handler.XmlHandler(resp, self)
+ if not isinstance(body, bytes):
+ body = body.encode('utf-8')
xml.sax.parseString(body, h)
# Use a dummy key to parse various response headers
# for versioning, encryption info and then explicitly
@@ -1801,6 +1828,8 @@ class Bucket(object):
response = self.get_xml_tags()
tags = Tags()
h = handler.XmlHandler(tags, self)
+ if not isinstance(response, bytes):
+ response = response.encode('utf-8')
xml.sax.parseString(response, h)
return tags
@@ -1818,11 +1847,13 @@ class Bucket(object):
def set_xml_tags(self, tag_str, headers=None, query_args='tagging'):
if headers is None:
headers = {}
- md5 = boto.utils.compute_md5(StringIO.StringIO(tag_str))
+ md5 = boto.utils.compute_md5(StringIO(tag_str))
headers['Content-MD5'] = md5[1]
headers['Content-Type'] = 'text/xml'
+ if not isinstance(tag_str, bytes):
+ tag_str = tag_str.encode('utf-8')
response = self.connection.make_request('PUT', self.name,
- data=tag_str.encode('utf-8'),
+ data=tag_str,
query_args=query_args,
headers=headers)
body = response.read()
diff --git a/awx/lib/site-packages/boto/s3/bucketlogging.py b/awx/lib/site-packages/boto/s3/bucketlogging.py
index ab3683926a..38cef1140e 100644
--- a/awx/lib/site-packages/boto/s3/bucketlogging.py
+++ b/awx/lib/site-packages/boto/s3/bucketlogging.py
@@ -20,7 +20,7 @@
# IN THE SOFTWARE.
import xml.sax.saxutils
-from acl import Grant
+from boto.s3.acl import Grant
class BucketLogging(object):
diff --git a/awx/lib/site-packages/boto/s3/connection.py b/awx/lib/site-packages/boto/s3/connection.py
index d6b3b52f68..0fcc1f5957 100644
--- a/awx/lib/site-packages/boto/s3/connection.py
+++ b/awx/lib/site-packages/boto/s3/connection.py
@@ -23,8 +23,8 @@
# IN THE SOFTWARE.
import xml.sax
-import urllib
import base64
+from boto.compat import six, urllib
import time
from boto.auth import detect_potential_s3sigv4
@@ -92,11 +92,11 @@ class _CallingFormat(object):
path = ''
if bucket != '':
path = '/' + bucket
- return path + '/%s' % urllib.quote(key)
+ return path + '/%s' % urllib.parse.quote(key)
def build_path_base(self, bucket, key=''):
key = boto.utils.get_utf8_value(key)
- return '/%s' % urllib.quote(key)
+ return '/%s' % urllib.parse.quote(key)
class SubdomainCallingFormat(_CallingFormat):
@@ -123,7 +123,7 @@ class OrdinaryCallingFormat(_CallingFormat):
path_base = '/'
if bucket:
path_base += "%s/" % bucket
- return path_base + urllib.quote(key)
+ return path_base + urllib.parse.quote(key)
class ProtocolIndependentOrdinaryCallingFormat(OrdinaryCallingFormat):
@@ -176,7 +176,7 @@ class S3Connection(AWSAuthConnection):
if host is NoHostProvided:
no_host_provided = True
host = self.DefaultHost
- if isinstance(calling_format, basestring):
+ if isinstance(calling_format, six.string_types):
calling_format=boto.utils.find_class(calling_format)()
self.calling_format = calling_format
self.bucket_class = bucket_class
@@ -350,9 +350,38 @@ class S3Connection(AWSAuthConnection):
return {"action": url, "fields": fields}
+ def generate_url_sigv4(self, expires_in, method, bucket='', key='',
+ headers=None, force_http=False,
+ response_headers=None, version_id=None,
+ iso_date=None):
+ path = self.calling_format.build_path_base(bucket, key)
+ auth_path = self.calling_format.build_auth_path(bucket, key)
+ host = self.calling_format.build_host(self.server_name(), bucket)
+
+ # For presigned URLs we should ignore the port if it's HTTPS
+ if host.endswith(':443'):
+ host = host[:-4]
+
+ params = {}
+ if version_id is not None:
+ params['VersionId'] = version_id
+
+ http_request = self.build_base_http_request(method, path, auth_path,
+ headers=headers, host=host,
+ params=params)
+
+ return self._auth_handler.presign(http_request, expires_in,
+ iso_date=iso_date)
+
def generate_url(self, expires_in, method, bucket='', key='', headers=None,
query_auth=True, force_http=False, response_headers=None,
expires_in_absolute=False, version_id=None):
+ if self._auth_handler.capability[0] == 'hmac-v4-s3':
+ # Handle the special sigv4 case
+ return self.generate_url_sigv4(expires_in, method, bucket=bucket,
+ key=key, headers=headers, force_http=force_http,
+ response_headers=response_headers, version_id=version_id)
+
headers = headers or {}
if expires_in_absolute:
expires = int(expires_in)
@@ -367,7 +396,7 @@ class S3Connection(AWSAuthConnection):
extra_qp.append("versionId=%s" % version_id)
if response_headers:
for k, v in response_headers.items():
- extra_qp.append("%s=%s" % (k, urllib.quote(v)))
+ extra_qp.append("%s=%s" % (k, urllib.parse.quote(v)))
if self.provider.security_token:
headers['x-amz-security-token'] = self.provider.security_token
if extra_qp:
@@ -376,7 +405,7 @@ class S3Connection(AWSAuthConnection):
c_string = boto.utils.canonical_string(method, auth_path, headers,
expires, self.provider)
b64_hmac = self._auth_handler.sign_string(c_string)
- encoded_canonical = urllib.quote(b64_hmac, safe='')
+ encoded_canonical = urllib.parse.quote(b64_hmac, safe='')
self.calling_format.build_path_base(bucket, key)
if query_auth:
query_part = '?' + self.QueryString % (encoded_canonical, expires,
@@ -389,7 +418,7 @@ class S3Connection(AWSAuthConnection):
if k.startswith(hdr_prefix):
# headers used for sig generation must be
# included in the url also.
- extra_qp.append("%s=%s" % (k, urllib.quote(v)))
+ extra_qp.append("%s=%s" % (k, urllib.parse.quote(v)))
if extra_qp:
delimiter = '?' if not query_part else '&'
query_part += delimiter + '&'.join(extra_qp)
@@ -411,6 +440,8 @@ class S3Connection(AWSAuthConnection):
response.status, response.reason, body)
rs = ResultSet([('Bucket', self.bucket_class)])
h = handler.XmlHandler(rs, self)
+ if not isinstance(body, bytes):
+ body = body.encode('utf-8')
xml.sax.parseString(body, h)
return rs
diff --git a/awx/lib/site-packages/boto/s3/key.py b/awx/lib/site-packages/boto/s3/key.py
index ba20c41aac..9674d3565b 100644
--- a/awx/lib/site-packages/boto/s3/key.py
+++ b/awx/lib/site-packages/boto/s3/key.py
@@ -20,20 +20,19 @@
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-
-from __future__ import with_statement
+import email.utils
import errno
import hashlib
import mimetypes
import os
import re
-import rfc822
-import StringIO
import base64
import binascii
import math
-import urllib
+from hashlib import md5
import boto.utils
+from boto.compat import BytesIO, six, urllib, encodebytes
+
from boto.exception import BotoClientError
from boto.exception import StorageDataError
from boto.exception import PleaseRetryException
@@ -44,10 +43,6 @@ from boto import UserAgent
from boto.utils import compute_md5, compute_hash
from boto.utils import find_matching_headers
from boto.utils import merge_headers_by_name
-try:
- from hashlib import md5
-except ImportError:
- from md5 import md5
class Key(object):
@@ -115,7 +110,7 @@ class Key(object):
self.is_latest = False
self.last_modified = None
self.owner = None
- self.storage_class = 'STANDARD'
+ self._storage_class = None
self.path = None
self.resp = None
self.mode = None
@@ -171,23 +166,46 @@ class Key(object):
def _get_base64md5(self):
if 'md5' in self.local_hashes and self.local_hashes['md5']:
- return binascii.b2a_base64(self.local_hashes['md5']).rstrip('\n')
+ md5 = self.local_hashes['md5']
+ if not isinstance(md5, bytes):
+ md5 = md5.encode('utf-8')
+ return binascii.b2a_base64(md5).decode('utf-8').rstrip('\n')
def _set_base64md5(self, value):
if value:
+ if not isinstance(value, six.string_types):
+ value = value.decode('utf-8')
self.local_hashes['md5'] = binascii.a2b_base64(value)
elif 'md5' in self.local_hashes:
del self.local_hashes['md5']
base64md5 = property(_get_base64md5, _set_base64md5);
+ def _get_storage_class(self):
+ if self._storage_class is None and self.bucket:
+ # Attempt to fetch storage class
+ list_items = list(self.bucket.list(self.name.encode('utf-8')))
+ if len(list_items) and getattr(list_items[0], '_storage_class',
+ None):
+ self._storage_class = list_items[0]._storage_class
+ else:
+ # Key is not yet saved? Just use default...
+ self._storage_class = 'STANDARD'
+
+ return self._storage_class
+
+ def _set_storage_class(self, value):
+ self._storage_class = value
+
+ storage_class = property(_get_storage_class, _set_storage_class)
+
def get_md5_from_hexdigest(self, md5_hexdigest):
"""
A utility function to create the 2-tuple (md5hexdigest, base64md5)
from just having a precalculated md5_hexdigest.
"""
digest = binascii.unhexlify(md5_hexdigest)
- base64md5 = base64.encodestring(digest)
+ base64md5 = encodebytes(digest)
if base64md5[-1] == '\n':
base64md5 = base64md5[0:-1]
return (md5_hexdigest, base64md5)
@@ -371,6 +389,9 @@ class Key(object):
raise StopIteration
return data
+ # Python 3 iterator support
+ __next__ = next
+
def read(self, size=0):
self.open_read()
if size == 0:
@@ -801,6 +822,10 @@ class Key(object):
chunk = fp.read(bytes_togo)
else:
chunk = fp.read(self.BufferSize)
+
+ if not isinstance(chunk, bytes):
+ chunk = chunk.encode('utf-8')
+
if spos is None:
# read at least something from a non-seekable fp.
self.read_from_stream = True
@@ -829,6 +854,9 @@ class Key(object):
else:
chunk = fp.read(self.BufferSize)
+ if not isinstance(chunk, bytes):
+ chunk = chunk.encode('utf-8')
+
self.size = data_len
for alg in digesters:
@@ -861,7 +889,9 @@ class Key(object):
for header in find_matching_headers('User-Agent', headers):
del headers[header]
headers['User-Agent'] = UserAgent
- if self.storage_class != 'STANDARD':
+ # If storage_class is None, then a user has not explicitly requested
+ # a storage class, so we can assume STANDARD here
+ if self._storage_class not in [None, 'STANDARD']:
headers[provider.storage_class_header] = self.storage_class
if find_matching_headers('Content-Encoding', headers):
self.content_encoding = merge_headers_by_name(
@@ -930,10 +960,20 @@ class Key(object):
if 200 <= response.status <= 299:
self.etag = response.getheader('etag')
+ md5 = self.md5
+ if isinstance(md5, bytes):
+ md5 = md5.decode('utf-8')
- if self.etag != '"%s"' % self.md5:
- raise provider.storage_data_error(
- 'ETag from S3 did not match computed MD5')
+ # If you use customer-provided encryption keys, the ETag value that
+ # Amazon S3 returns in the response will not be the MD5 of the
+ # object.
+ server_side_encryption_customer_algorithm = response.getheader(
+ 'x-amz-server-side-encryption-customer-algorithm', None)
+ if server_side_encryption_customer_algorithm is None:
+ if self.etag != '"%s"' % md5:
+ raise provider.storage_data_error(
+ 'ETag from S3 did not match computed MD5. '
+ '%s vs. %s' % (self.etag, self.md5))
return True
@@ -1371,9 +1411,9 @@ class Key(object):
be encrypted on the server-side by S3 and will be stored
in an encrypted form while at rest in S3.
"""
- if isinstance(string_data, unicode):
+ if not isinstance(string_data, bytes):
string_data = string_data.encode("utf-8")
- fp = StringIO.StringIO(string_data)
+ fp = BytesIO(string_data)
r = self.set_contents_from_file(fp, headers, replace, cb, num_cb,
policy, md5, reduced_redundancy,
encrypt_key=encrypt_key)
@@ -1418,7 +1458,7 @@ class Key(object):
headers/values that will override any headers associated
with the stored object in the response. See
http://goo.gl/EWOPb for details.
-
+
:type version_id: str
:param version_id: The ID of a particular version of the object.
If this parameter is not supplied but the Key object has
@@ -1461,7 +1501,7 @@ class Key(object):
if response_headers:
for key in response_headers:
query_args.append('%s=%s' % (
- key, urllib.quote(response_headers[key])))
+ key, urllib.parse.quote(response_headers[key])))
query_args = '&'.join(query_args)
self.open('r', headers, query_args=query_args,
override_num_retries=override_num_retries)
@@ -1497,7 +1537,7 @@ class Key(object):
if i == cb_count or cb_count == -1:
cb(data_len, cb_size)
i = 0
- except IOError, e:
+ except IOError as e:
if e.errno == errno.ENOSPC:
raise StorageDataError('Out of space for destination file '
'%s' % fp.name)
@@ -1647,7 +1687,7 @@ class Key(object):
headers/values that will override any headers associated
with the stored object in the response. See
http://goo.gl/EWOPb for details.
-
+
:type version_id: str
:param version_id: The ID of a particular version of the object.
If this parameter is not supplied but the Key object has
@@ -1669,8 +1709,8 @@ class Key(object):
# if last_modified date was sent from s3, try to set file's timestamp
if self.last_modified is not None:
try:
- modified_tuple = rfc822.parsedate_tz(self.last_modified)
- modified_stamp = int(rfc822.mktime_tz(modified_tuple))
+ modified_tuple = email.utils.parsedate_tz(self.last_modified)
+ modified_stamp = int(email.utils.mktime_tz(modified_tuple))
os.utime(fp.name, (modified_stamp, modified_stamp))
except Exception:
pass
@@ -1679,7 +1719,7 @@ class Key(object):
cb=None, num_cb=10,
torrent=False,
version_id=None,
- response_headers=None):
+ response_headers=None, encoding=None):
"""
Retrieve an object from S3 using the name of the Key object as the
key in S3. Return the contents of the object as a string.
@@ -1721,14 +1761,24 @@ class Key(object):
``version_id`` attribute to None to always grab the latest
version from a version-enabled bucket.
- :rtype: string
- :returns: The contents of the file as a string
+ :type encoding: str
+ :param encoding: The text encoding to use, such as ``utf-8``
+ or ``iso-8859-1``. If set, then a string will be returned.
+ Defaults to ``None`` and returns bytes.
+
+ :rtype: bytes or str
+ :returns: The contents of the file as bytes or a string
"""
- fp = StringIO.StringIO()
+ fp = BytesIO()
self.get_contents_to_file(fp, headers, cb, num_cb, torrent=torrent,
version_id=version_id,
response_headers=response_headers)
- return fp.getvalue()
+ value = fp.getvalue()
+
+ if encoding is not None:
+ value = value.decode(encoding)
+
+ return value
def add_email_grant(self, permission, email_address, headers=None):
"""
diff --git a/awx/lib/site-packages/boto/s3/keyfile.py b/awx/lib/site-packages/boto/s3/keyfile.py
index 84858a2ba2..4245413d74 100644
--- a/awx/lib/site-packages/boto/s3/keyfile.py
+++ b/awx/lib/site-packages/boto/s3/keyfile.py
@@ -75,7 +75,7 @@ class KeyFile():
raise IOError('Invalid whence param (%d) passed to seek' % whence)
try:
self.key.open_read(headers={"Range": "bytes=%d-" % pos})
- except StorageResponseError, e:
+ except StorageResponseError as e:
# 416 Invalid Range means that the given starting byte was past the end
# of file. We catch this because the Python file interface allows silently
# seeking past the end of the file.
diff --git a/awx/lib/site-packages/boto/s3/lifecycle.py b/awx/lib/site-packages/boto/s3/lifecycle.py
index dadc1d3293..8ceb879570 100644
--- a/awx/lib/site-packages/boto/s3/lifecycle.py
+++ b/awx/lib/site-packages/boto/s3/lifecycle.py
@@ -19,7 +19,7 @@
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-
+from boto.compat import six
class Rule(object):
"""
@@ -48,7 +48,7 @@ class Rule(object):
self.id = id
self.prefix = '' if prefix is None else prefix
self.status = status
- if isinstance(expiration, (int, long)):
+ if isinstance(expiration, six.integer_types):
# retain backwards compatibility???
self.expiration = Expiration(days=expiration)
else:
diff --git a/awx/lib/site-packages/boto/s3/multipart.py b/awx/lib/site-packages/boto/s3/multipart.py
index ba89d7359c..056f9ca52a 100644
--- a/awx/lib/site-packages/boto/s3/multipart.py
+++ b/awx/lib/site-packages/boto/s3/multipart.py
@@ -22,8 +22,8 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-import user
-import key
+from boto.s3 import user
+from boto.s3 import key
from boto import handler
import xml.sax
diff --git a/awx/lib/site-packages/boto/s3/resumable_download_handler.py b/awx/lib/site-packages/boto/s3/resumable_download_handler.py
index 56e0ce3e8c..a2a88c74cd 100644
--- a/awx/lib/site-packages/boto/s3/resumable_download_handler.py
+++ b/awx/lib/site-packages/boto/s3/resumable_download_handler.py
@@ -18,7 +18,6 @@
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-
import errno
import httplib
import os
@@ -135,7 +134,7 @@ class ResumableDownloadHandler(object):
if len(self.etag_value_for_current_download) < self.MIN_ETAG_LEN:
print('Couldn\'t read etag in tracker file (%s). Restarting '
'download from scratch.' % self.tracker_file_name)
- except IOError, e:
+ except IOError as e:
# Ignore non-existent file (happens first time a download
# is attempted on an object), but warn user for other errors.
if e.errno != errno.ENOENT:
@@ -156,7 +155,7 @@ class ResumableDownloadHandler(object):
try:
f = open(self.tracker_file_name, 'w')
f.write('%s\n' % self.etag_value_for_current_download)
- except IOError, e:
+ except IOError as e:
raise ResumableDownloadException(
'Couldn\'t write tracker file (%s): %s.\nThis can happen'
'if you\'re using an incorrectly configured download tool\n'
@@ -194,17 +193,17 @@ class ResumableDownloadHandler(object):
key.size), ResumableTransferDisposition.ABORT)
elif cur_file_size == key.size:
if key.bucket.connection.debug >= 1:
- print 'Download complete.'
+ print('Download complete.')
return
if key.bucket.connection.debug >= 1:
- print 'Resuming download.'
+ print('Resuming download.')
headers = headers.copy()
headers['Range'] = 'bytes=%d-%d' % (cur_file_size, key.size - 1)
cb = ByteTranslatingCallbackHandler(cb, cur_file_size).call
self.download_start_point = cur_file_size
else:
if key.bucket.connection.debug >= 1:
- print 'Starting new resumable download.'
+ print('Starting new resumable download.')
self._save_tracker_info(key)
self.download_start_point = 0
# Truncate the file, in case a new resumable download is being
@@ -285,9 +284,9 @@ class ResumableDownloadHandler(object):
# non-resumable downloads, this call was removed. Checksum
# validation of file contents should be done by the caller.
if debug >= 1:
- print 'Resumable download complete.'
+ print('Resumable download complete.')
return
- except self.RETRYABLE_EXCEPTIONS, e:
+ except self.RETRYABLE_EXCEPTIONS as e:
if debug >= 1:
print('Caught exception (%s)' % e.__repr__())
if isinstance(e, IOError) and e.errno == errno.EPIPE:
@@ -301,7 +300,7 @@ class ResumableDownloadHandler(object):
else:
key.get_file(fp, headers, cb, num_cb, torrent, version_id,
override_num_retries=0)
- except ResumableDownloadException, e:
+ except ResumableDownloadException as e:
if (e.disposition ==
ResumableTransferDisposition.ABORT_CUR_PROCESS):
if debug >= 1:
diff --git a/awx/lib/site-packages/boto/sdb/__init__.py b/awx/lib/site-packages/boto/sdb/__init__.py
index 6cb30050cd..1235a88a2e 100644
--- a/awx/lib/site-packages/boto/sdb/__init__.py
+++ b/awx/lib/site-packages/boto/sdb/__init__.py
@@ -20,7 +20,7 @@
# IN THE SOFTWARE.
#
-from .regioninfo import SDBRegionInfo
+from boto.sdb.regioninfo import SDBRegionInfo
from boto.regioninfo import get_regions
diff --git a/awx/lib/site-packages/boto/sdb/connection.py b/awx/lib/site-packages/boto/sdb/connection.py
index c7370b6745..fa7cb83e3e 100644
--- a/awx/lib/site-packages/boto/sdb/connection.py
+++ b/awx/lib/site-packages/boto/sdb/connection.py
@@ -18,7 +18,6 @@
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-
import xml.sax
import threading
import boto
@@ -235,9 +234,9 @@ class SDBConnection(AWSQueryConnection):
requests made on this specific connection instance. It is by
no means an account-wide estimate.
"""
- print 'Total Usage: %f compute seconds' % self.box_usage
+ print('Total Usage: %f compute seconds' % self.box_usage)
cost = self.box_usage * 0.14
- print 'Approximate Cost: $%f' % cost
+ print('Approximate Cost: $%f' % cost)
def get_domain(self, domain_name, validate=True):
"""
@@ -318,7 +317,7 @@ class SDBConnection(AWSQueryConnection):
:rtype: :class:`boto.sdb.domain.Domain` object
:return: The newly created domain
"""
- params = {'DomainName':domain_name}
+ params = {'DomainName': domain_name}
d = self.get_object('CreateDomain', params, Domain)
d.name = domain_name
return d
@@ -361,7 +360,7 @@ class SDBConnection(AWSQueryConnection):
"""
domain, domain_name = self.get_domain_and_name(domain_or_name)
- params = {'DomainName':domain_name}
+ params = {'DomainName': domain_name}
return self.get_status('DeleteDomain', params)
def domain_metadata(self, domain_or_name):
@@ -375,7 +374,7 @@ class SDBConnection(AWSQueryConnection):
:return: The newly created domain metadata object
"""
domain, domain_name = self.get_domain_and_name(domain_or_name)
- params = {'DomainName':domain_name}
+ params = {'DomainName': domain_name}
d = self.get_object('DomainMetadata', params, DomainMetaData)
d.domain = domain
return d
@@ -421,8 +420,8 @@ class SDBConnection(AWSQueryConnection):
:return: True if successful
"""
domain, domain_name = self.get_domain_and_name(domain_or_name)
- params = {'DomainName' : domain_name,
- 'ItemName' : item_name}
+ params = {'DomainName': domain_name,
+ 'ItemName': item_name}
self._build_name_value_list(params, attributes, replace)
if expected_value:
self._build_expected_value(params, expected_value)
@@ -451,7 +450,7 @@ class SDBConnection(AWSQueryConnection):
:return: True if successful
"""
domain, domain_name = self.get_domain_and_name(domain_or_name)
- params = {'DomainName' : domain_name}
+ params = {'DomainName': domain_name}
self._build_batch_list(params, items, replace)
return self.get_status('BatchPutAttributes', params, verb='POST')
@@ -484,8 +483,8 @@ class SDBConnection(AWSQueryConnection):
:return: An Item with the requested attribute name/values set on it
"""
domain, domain_name = self.get_domain_and_name(domain_or_name)
- params = {'DomainName' : domain_name,
- 'ItemName' : item_name}
+ params = {'DomainName': domain_name,
+ 'ItemName': item_name}
if consistent_read:
params['ConsistentRead'] = 'true'
if attribute_names:
@@ -545,8 +544,8 @@ class SDBConnection(AWSQueryConnection):
:return: True if successful
"""
domain, domain_name = self.get_domain_and_name(domain_or_name)
- params = {'DomainName':domain_name,
- 'ItemName' : item_name}
+ params = {'DomainName': domain_name,
+ 'ItemName': item_name}
if attr_names:
if isinstance(attr_names, list):
self._build_name_list(params, attr_names)
@@ -578,7 +577,7 @@ class SDBConnection(AWSQueryConnection):
:return: True if successful
"""
domain, domain_name = self.get_domain_and_name(domain_or_name)
- params = {'DomainName' : domain_name}
+ params = {'DomainName': domain_name}
self._build_batch_list(params, items, False)
return self.get_status('BatchDeleteAttributes', params, verb='POST')
@@ -606,7 +605,7 @@ class SDBConnection(AWSQueryConnection):
:return: An iterator containing the results.
"""
domain, domain_name = self.get_domain_and_name(domain_or_name)
- params = {'SelectExpression' : query}
+ params = {'SelectExpression': query}
if consistent_read:
params['ConsistentRead'] = 'true'
if next_token:
@@ -614,6 +613,6 @@ class SDBConnection(AWSQueryConnection):
try:
return self.get_list('Select', params, [('Item', self.item_cls)],
parent=domain)
- except SDBResponseError, e:
+ except SDBResponseError as e:
e.body = "Query: %s\n%s" % (query, e.body)
raise e
diff --git a/awx/lib/site-packages/boto/sdb/db/blob.py b/awx/lib/site-packages/boto/sdb/db/blob.py
index b50794c961..6c286ec379 100644
--- a/awx/lib/site-packages/boto/sdb/db/blob.py
+++ b/awx/lib/site-packages/boto/sdb/db/blob.py
@@ -19,6 +19,8 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
+from boto.compat import six
+
class Blob(object):
"""Blob object"""
@@ -37,19 +39,18 @@ class Blob(object):
return f
def __str__(self):
- return unicode(self).encode('utf-8')
+ return six.text_type(self).encode('utf-8')
def __unicode__(self):
if hasattr(self.file, "get_contents_as_string"):
value = self.file.get_contents_as_string()
else:
value = self.file.getvalue()
- if isinstance(value, unicode):
+ if isinstance(value, six.text_type):
return value
else:
return value.decode('utf-8')
-
def read(self):
if hasattr(self.file, "get_contents_as_string"):
return self.file.get_contents_as_string()
@@ -60,7 +61,7 @@ class Blob(object):
return self.file.readline()
def next(self):
- return self.file.next()
+ return next(self.file)
def __iter__(self):
return iter(self.file)
diff --git a/awx/lib/site-packages/boto/sdb/db/key.py b/awx/lib/site-packages/boto/sdb/db/key.py
index 6ac47a68aa..42f6bc9b3a 100644
--- a/awx/lib/site-packages/boto/sdb/db/key.py
+++ b/awx/lib/site-packages/boto/sdb/db/key.py
@@ -54,6 +54,6 @@ class Key(object):
def parent(self):
raise NotImplementedError("Key parents are not currently supported")
-
+
def __str__(self):
return self.id_or_name()
diff --git a/awx/lib/site-packages/boto/sdb/db/manager/sdbmanager.py b/awx/lib/site-packages/boto/sdb/db/manager/sdbmanager.py
index 2613ff0867..d964d07a2d 100644
--- a/awx/lib/site-packages/boto/sdb/db/manager/sdbmanager.py
+++ b/awx/lib/site-packages/boto/sdb/db/manager/sdbmanager.py
@@ -28,6 +28,7 @@ from boto.sdb.db.blob import Blob
from boto.sdb.db.property import ListProperty, MapProperty
from datetime import datetime, date, time
from boto.exception import SDBPersistenceError, S3ResponseError
+from boto.compat import map, six, long_type
ISO8601 = '%Y-%m-%dT%H:%M:%SZ'
@@ -58,7 +59,6 @@ class SDBConverter(object):
self.manager = manager
self.type_map = {bool: (self.encode_bool, self.decode_bool),
int: (self.encode_int, self.decode_int),
- long: (self.encode_long, self.decode_long),
float: (self.encode_float, self.decode_float),
self.model_class: (
self.encode_reference, self.decode_reference
@@ -70,6 +70,8 @@ class SDBConverter(object):
Blob: (self.encode_blob, self.decode_blob),
str: (self.encode_string, self.decode_string),
}
+ if six.PY2:
+ self.type_map[long] = (self.encode_long, self.decode_long)
def encode(self, item_type, value):
try:
@@ -193,12 +195,12 @@ class SDBConverter(object):
return int(value)
def encode_long(self, value):
- value = long(value)
+ value = long_type(value)
value += 9223372036854775808
return '%020d' % value
def decode_long(self, value):
- value = long(value)
+ value = long_type(value)
value -= 9223372036854775808
return value
@@ -264,7 +266,7 @@ class SDBConverter(object):
return float(mantissa + 'e' + exponent)
def encode_datetime(self, value):
- if isinstance(value, basestring):
+ if isinstance(value, six.string_types):
return value
if isinstance(value, datetime):
return value.strftime(ISO8601)
@@ -285,11 +287,11 @@ class SDBConverter(object):
else:
value = value.split("-")
return date(int(value[0]), int(value[1]), int(value[2]))
- except Exception, e:
+ except Exception:
return None
def encode_date(self, value):
- if isinstance(value, basestring):
+ if isinstance(value, six.string_types):
return value
return value.isoformat()
@@ -322,7 +324,7 @@ class SDBConverter(object):
def encode_reference(self, value):
if value in (None, 'None', '', ' '):
return None
- if isinstance(value, basestring):
+ if isinstance(value, six.string_types):
return value
else:
return value.id
@@ -335,7 +337,7 @@ class SDBConverter(object):
def encode_blob(self, value):
if not value:
return None
- if isinstance(value, basestring):
+ if isinstance(value, six.string_types):
return value
if not value.id:
@@ -364,7 +366,7 @@ class SDBConverter(object):
bucket = s3.get_bucket(match.group(1), validate=False)
try:
key = bucket.get_key(match.group(2))
- except S3ResponseError, e:
+ except S3ResponseError as e:
if e.reason != "Forbidden":
raise
return None
@@ -380,14 +382,14 @@ class SDBConverter(object):
if not isinstance(value, str):
return value
try:
- return unicode(value, 'utf-8')
+ return six.text_type(value, 'utf-8')
except:
# really, this should throw an exception.
# in the interest of not breaking current
# systems, however:
arr = []
for ch in value:
- arr.append(unichr(ord(ch)))
+ arr.append(six.unichr(ord(ch)))
return u"".join(arr)
def decode_string(self, value):
@@ -490,7 +492,7 @@ class SDBManager(object):
value = prop.make_value_from_datastore(value)
try:
setattr(obj, prop.name, value)
- except Exception, e:
+ except Exception as e:
boto.log.exception(e)
obj._loaded = True
@@ -522,7 +524,7 @@ class SDBManager(object):
query_str = "select * from `%s` %s" % (self.domain.name, self._build_filter_part(query.model_class, query.filters, query.sort_by, query.select))
if query.limit:
query_str += " limit %s" % query.limit
- rs = self.domain.select(query_str, max_items=query.limit, next_token = query.next_token)
+ rs = self.domain.select(query_str, max_items=query.limit, next_token=query.next_token)
query.rs = rs
return self._object_lister(query.model_class, rs)
@@ -581,7 +583,7 @@ class SDBManager(object):
order_by_filtered = True
query_parts.append("(%s)" % select)
- if isinstance(filters, basestring):
+ if isinstance(filters, six.string_types):
query = "WHERE %s AND `__type__` = '%s'" % (filters, cls.__name__)
if order_by in ["__id__", "itemName()"]:
query += " ORDER BY itemName() %s" % order_by_method
@@ -600,7 +602,7 @@ class SDBManager(object):
property = cls.find_property(name)
if name == order_by:
order_by_filtered = True
- if types.TypeType(value) == types.ListType:
+ if types.TypeType(value) == list:
filter_parts_sub = []
for val in value:
val = self.encode_value(property, val)
@@ -674,7 +676,7 @@ class SDBManager(object):
if property.unique:
try:
args = {property.name: value}
- obj2 = obj.find(**args).next()
+ obj2 = next(obj.find(**args))
if obj2.id != obj.id:
raise SDBPersistenceError("Error: %s must be unique!" % property.name)
except(StopIteration):
@@ -701,7 +703,7 @@ class SDBManager(object):
if prop.unique:
try:
args = {prop.name: value}
- obj2 = obj.find(**args).next()
+ obj2 = next(obj.find(**args))
if obj2.id != obj.id:
raise SDBPersistenceError("Error: %s must be unique!" % prop.name)
except(StopIteration):
diff --git a/awx/lib/site-packages/boto/sdb/db/manager/xmlmanager.py b/awx/lib/site-packages/boto/sdb/db/manager/xmlmanager.py
index 2cfcd13278..f457347ad3 100644
--- a/awx/lib/site-packages/boto/sdb/db/manager/xmlmanager.py
+++ b/awx/lib/site-packages/boto/sdb/db/manager/xmlmanager.py
@@ -22,6 +22,7 @@ import boto
from boto.utils import find_class, Password
from boto.sdb.db.key import Key
from boto.sdb.db.model import Model
+from boto.compat import six, encodebytes
from datetime import datetime
from xml.dom.minidom import getDOMImplementation, parse, parseString, Node
@@ -43,11 +44,12 @@ class XMLConverter(object):
self.manager = manager
self.type_map = { bool : (self.encode_bool, self.decode_bool),
int : (self.encode_int, self.decode_int),
- long : (self.encode_long, self.decode_long),
Model : (self.encode_reference, self.decode_reference),
Key : (self.encode_reference, self.decode_reference),
Password : (self.encode_password, self.decode_password),
datetime : (self.encode_datetime, self.decode_datetime)}
+ if six.PY2:
+ self.type_map[long] = (self.encode_long, self.decode_long)
def get_text_value(self, parent_node):
value = ''
@@ -145,7 +147,7 @@ class XMLConverter(object):
return None
def encode_reference(self, value):
- if isinstance(value, basestring):
+ if isinstance(value, six.string_types):
return value
if value is None:
return ''
@@ -201,9 +203,8 @@ class XMLManager(object):
self.enable_ssl = enable_ssl
self.auth_header = None
if self.db_user:
- import base64
- base64string = base64.encodestring('%s:%s' % (self.db_user, self.db_passwd))[:-1]
- authheader = "Basic %s" % base64string
+ base64string = encodebytes('%s:%s' % (self.db_user, self.db_passwd))[:-1]
+ authheader = "Basic %s" % base64string
self.auth_header = authheader
def _connect(self):
@@ -373,7 +374,7 @@ class XMLManager(object):
for property in properties:
if property.name == name:
found = True
- if types.TypeType(value) == types.ListType:
+ if types.TypeType(value) == list:
filter_parts = []
for val in value:
val = self.encode_value(property, val)
@@ -459,14 +460,14 @@ class XMLManager(object):
elif isinstance(value, Node):
prop_node.appendChild(value)
else:
- text_node = doc.createTextNode(unicode(value).encode("ascii", "ignore"))
+ text_node = doc.createTextNode(six.text_type(value).encode("ascii", "ignore"))
prop_node.appendChild(text_node)
obj_node.appendChild(prop_node)
return doc
def unmarshal_object(self, fp, cls=None, id=None):
- if isinstance(fp, basestring):
+ if isinstance(fp, six.string_types):
doc = parseString(fp)
else:
doc = parse(fp)
@@ -477,7 +478,7 @@ class XMLManager(object):
Same as unmarshalling an object, except it returns
from "get_props_from_doc"
"""
- if isinstance(fp, basestring):
+ if isinstance(fp, six.string_types):
doc = parseString(fp)
else:
doc = parse(fp)
@@ -488,7 +489,7 @@ class XMLManager(object):
return self._make_request("DELETE", url)
def set_key_value(self, obj, name, value):
- self.domain.put_attributes(obj.id, {name : value}, replace=True)
+ self.domain.put_attributes(obj.id, {name: value}, replace=True)
def delete_key_value(self, obj, name):
self.domain.delete_attributes(obj.id, name)
@@ -514,4 +515,3 @@ class XMLManager(object):
obj = obj.get_by_id(obj.id)
obj._loaded = True
return obj
-
diff --git a/awx/lib/site-packages/boto/sdb/db/model.py b/awx/lib/site-packages/boto/sdb/db/model.py
index 9e589d523f..741ad43871 100644
--- a/awx/lib/site-packages/boto/sdb/db/model.py
+++ b/awx/lib/site-packages/boto/sdb/db/model.py
@@ -23,6 +23,7 @@ from boto.sdb.db.property import Property
from boto.sdb.db.key import Key
from boto.sdb.db.query import Query
import boto
+from boto.compat import filter
class ModelMeta(type):
"Metaclass for all Models"
@@ -166,7 +167,7 @@ class Model(object):
# so if it fails we just revert to it's default value
try:
setattr(self, key, kw[key])
- except Exception, e:
+ except Exception as e:
boto.log.exception(e)
def __repr__(self):
@@ -254,9 +255,9 @@ class Model(object):
props = {}
for prop in self.properties(hidden=False):
props[prop.name] = getattr(self, prop.name)
- obj = {'properties' : props,
- 'id' : self.id}
- return {self.__class__.__name__ : obj}
+ obj = {'properties': props,
+ 'id': self.id}
+ return {self.__class__.__name__: obj}
def to_xml(self, doc=None):
xmlmanager = self.get_xmlmanager()
@@ -293,5 +294,3 @@ class Expando(Model):
object.__setattr__(self, name, value)
return value
raise AttributeError
-
-
diff --git a/awx/lib/site-packages/boto/sdb/db/property.py b/awx/lib/site-packages/boto/sdb/db/property.py
index 7488c2c30b..575aa8924d 100644
--- a/awx/lib/site-packages/boto/sdb/db/property.py
+++ b/awx/lib/site-packages/boto/sdb/db/property.py
@@ -20,13 +20,14 @@
# IN THE SOFTWARE.
import datetime
-from key import Key
+from boto.sdb.db.key import Key
from boto.utils import Password
from boto.sdb.db.query import Query
import re
import boto
import boto.s3.key
from boto.sdb.db.blob import Blob
+from boto.compat import six, long_type
class Property(object):
@@ -76,7 +77,7 @@ class Property(object):
self.slot_name = '_' + self.name
def default_validator(self, value):
- if isinstance(value, basestring) or value == self.default_value():
+ if isinstance(value, six.string_types) or value == self.default_value():
return
if not isinstance(value, self.data_type):
raise TypeError('Validation Error, %s.%s expecting %s, got %s' % (self.model_class.__name__, self.name, self.data_type, type(value)))
@@ -87,7 +88,7 @@ class Property(object):
def validate(self, value):
if self.required and value is None:
raise ValueError('%s is a required property' % self.name)
- if self.choices and value and not value in self.choices:
+ if self.choices and value and value not in self.choices:
raise ValueError('%s not a valid choice for %s.%s' % (value, self.model_class.__name__, self.name))
if self.validator:
self.validator(value)
@@ -113,7 +114,7 @@ class Property(object):
def validate_string(value):
if value is None:
return
- elif isinstance(value, basestring):
+ elif isinstance(value, six.string_types):
if len(value) > 1024:
raise ValueError('Length of value greater than maxlength')
else:
@@ -144,7 +145,7 @@ class TextProperty(Property):
def validate(self, value):
value = super(TextProperty, self).validate(value)
- if not isinstance(value, basestring):
+ if not isinstance(value, six.string_types):
raise TypeError('Expecting Text, got %s' % type(value))
if self.max_length and len(value) > self.max_length:
raise ValueError('Length of value greater than maxlength %s' % self.max_length)
@@ -335,7 +336,7 @@ class IntegerProperty(Property):
class LongProperty(Property):
- data_type = long
+ data_type = long_type
type_name = 'Long'
def __init__(self, verbose_name=None, name=None, default=0, required=False,
@@ -343,7 +344,7 @@ class LongProperty(Property):
super(LongProperty, self).__init__(verbose_name, name, default, required, validator, choices, unique)
def validate(self, value):
- value = long(value)
+ value = long_type(value)
value = super(LongProperty, self).validate(value)
min = -9223372036854775808
max = 9223372036854775807
@@ -493,7 +494,7 @@ class ReferenceProperty(Property):
# If the value is still the UUID for the referenced object, we need to create
# the object now that is the attribute has actually been accessed. This lazy
# instantiation saves unnecessary roundtrips to SimpleDB
- if isinstance(value, basestring):
+ if isinstance(value, six.string_types):
value = self.reference_class(value)
setattr(obj, self.name, value)
return value
@@ -537,7 +538,7 @@ class ReferenceProperty(Property):
raise ValueError('%s is a required property' % self.name)
if value == self.default_value():
return
- if not isinstance(value, basestring):
+ if not isinstance(value, six.string_types):
self.check_instance(value)
@@ -626,16 +627,16 @@ class ListProperty(Property):
if not isinstance(value, list):
value = [value]
- if self.item_type in (int, long):
- item_type = (int, long)
- elif self.item_type in (str, unicode):
- item_type = (str, unicode)
+ if self.item_type in six.integer_types:
+ item_type = six.integer_types
+ elif self.item_type in six.string_types:
+ item_type = six.string_types
else:
item_type = self.item_type
for item in value:
if not isinstance(item, item_type):
- if item_type == (int, long):
+ if item_type == six.integer_types:
raise ValueError('Items in the %s list must all be integers.' % self.name)
else:
raise ValueError('Items in the %s list must all be %s instances' %
@@ -650,10 +651,10 @@ class ListProperty(Property):
def __set__(self, obj, value):
"""Override the set method to allow them to set the property to an instance of the item_type instead of requiring a list to be passed in"""
- if self.item_type in (int, long):
- item_type = (int, long)
- elif self.item_type in (str, unicode):
- item_type = (str, unicode)
+ if self.item_type in six.integer_types:
+ item_type = six.integer_types
+ elif self.item_type in six.string_types:
+ item_type = six.string_types
else:
item_type = self.item_type
if isinstance(value, item_type):
@@ -680,16 +681,16 @@ class MapProperty(Property):
if not isinstance(value, dict):
raise ValueError('Value must of type dict')
- if self.item_type in (int, long):
- item_type = (int, long)
- elif self.item_type in (str, unicode):
- item_type = (str, unicode)
+ if self.item_type in six.integer_types:
+ item_type = six.integer_types
+ elif self.item_type in six.string_types:
+ item_type = six.string_types
else:
item_type = self.item_type
for key in value:
if not isinstance(value[key], item_type):
- if item_type == (int, long):
+ if item_type == six.integer_types:
raise ValueError('Values in the %s Map must all be integers.' % self.name)
else:
raise ValueError('Values in the %s Map must all be %s instances' %
diff --git a/awx/lib/site-packages/boto/sdb/db/query.py b/awx/lib/site-packages/boto/sdb/db/query.py
index bd1a41dd90..8945d4c0aa 100644
--- a/awx/lib/site-packages/boto/sdb/db/query.py
+++ b/awx/lib/site-packages/boto/sdb/db/query.py
@@ -1,3 +1,4 @@
+from boto.compat import six
# Copyright (c) 2006,2007,2008 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
@@ -41,7 +42,7 @@ class Query(object):
def next(self):
if self.__local_iter__ is None:
self.__local_iter__ = self.__iter__()
- return self.__local_iter__.next()
+ return next(self.__local_iter__)
def filter(self, property_operator, value):
self.filters.append((property_operator, value))
@@ -63,7 +64,7 @@ class Query(object):
def order(self, key):
self.sort_by = key
return self
-
+
def to_xml(self, doc=None):
if not doc:
xmlmanager = self.model_class.get_xmlmanager()
diff --git a/awx/lib/site-packages/boto/sdb/db/sequence.py b/awx/lib/site-packages/boto/sdb/db/sequence.py
index 70540c52f1..a28798930c 100644
--- a/awx/lib/site-packages/boto/sdb/db/sequence.py
+++ b/awx/lib/site-packages/boto/sdb/db/sequence.py
@@ -20,6 +20,7 @@
# IN THE SOFTWARE.
from boto.exception import SDBResponseError
+from boto.compat import six
class SequenceGenerator(object):
"""Generic Sequence Generator object, this takes a single
@@ -71,8 +72,7 @@ class SequenceGenerator(object):
def _inc(self, val):
"""Increment a single value"""
assert(len(val) == self.sequence_length)
- return self.sequence_string[(self.sequence_string.index(val)+1) % len(self.sequence_string)]
-
+ return self.sequence_string[(self.sequence_string.index(val) + 1) % len(self.sequence_string)]
#
@@ -100,19 +100,17 @@ def fib(cv=1, lv=0):
increment_string = SequenceGenerator("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
-
class Sequence(object):
"""A simple Sequence using the new SDB "Consistent" features
Based largly off of the "Counter" example from mitch garnaat:
http://bitbucket.org/mitch/stupidbototricks/src/tip/counter.py"""
-
def __init__(self, id=None, domain_name=None, fnc=increment_by_one, init_val=None):
- """Create a new Sequence, using an optional function to
+ """Create a new Sequence, using an optional function to
increment to the next number, by default we just increment by one.
Every parameter here is optional, if you don't specify any options
then you'll get a new SequenceGenerator with a random ID stored in the
- default domain that increments by one and uses the default botoweb
+ default domain that increments by one and uses the default botoweb
environment
:param id: Optional ID (name) for this counter
@@ -127,7 +125,7 @@ class Sequence(object):
Your function must accept "None" to get the initial value
:type fnc: function, str
- :param init_val: Initial value, by default this is the first element in your sequence,
+ :param init_val: Initial value, by default this is the first element in your sequence,
but you can pass in any value, even a string if you pass in a function that uses
strings instead of ints to increment
"""
@@ -146,7 +144,7 @@ class Sequence(object):
self.item_type = type(fnc(None))
self.timestamp = None
# Allow us to pass in a full name to a function
- if isinstance(fnc, basestring):
+ if isinstance(fnc, six.string_types):
from boto.utils import find_class
fnc = find_class(fnc)
self.fnc = fnc
@@ -169,7 +167,7 @@ class Sequence(object):
try:
self.db.put_attributes(self.id, new_val, expected_value=expected_value)
self.timestamp = new_val['timestamp']
- except SDBResponseError, e:
+ except SDBResponseError as e:
if e.status == 409:
raise ValueError("Sequence out of sync")
else:
@@ -208,7 +206,7 @@ class Sequence(object):
self.domain_name = boto.config.get("DB", "sequence_db", boto.config.get("DB", "db_name", "default"))
try:
self._db = sdb.get_domain(self.domain_name)
- except SDBResponseError, e:
+ except SDBResponseError as e:
if e.status == 400:
self._db = sdb.create_domain(self.domain_name)
else:
diff --git a/awx/lib/site-packages/boto/sdb/db/test_db.py b/awx/lib/site-packages/boto/sdb/db/test_db.py
index b582bcee6d..ba2fb3cd5f 100644
--- a/awx/lib/site-packages/boto/sdb/db/test_db.py
+++ b/awx/lib/site-packages/boto/sdb/db/test_db.py
@@ -109,7 +109,7 @@ def test_float():
assert tt.name == t.name
assert tt.value == t.value
return t
-
+
def test_required():
global _objects
t = TestRequired()
diff --git a/awx/lib/site-packages/boto/sdb/domain.py b/awx/lib/site-packages/boto/sdb/domain.py
index 6f0489e2ad..faed813326 100644
--- a/awx/lib/site-packages/boto/sdb/domain.py
+++ b/awx/lib/site-packages/boto/sdb/domain.py
@@ -18,11 +18,14 @@
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
+from __future__ import print_function
"""
Represents an SDB Domain
"""
+
from boto.sdb.queryresultset import SelectResultSet
+from boto.compat import six
class Domain(object):
@@ -240,26 +243,26 @@ class Domain(object):
if not f:
from tempfile import TemporaryFile
f = TemporaryFile()
-        print >> f, '<?xml version="1.0" ?>'
-        print >> f, '<Domain id="%s">' % self.name
+        print('<?xml version="1.0" ?>', file=f)
+        print('<Domain id="%s">' % self.name, file=f)
         for item in self:
-            print >> f, '\t<Item id="%s">' % item.name
+            print('\t<Item id="%s">' % item.name, file=f)
             for k in item:
-                print >> f, '\t\t<attribute id="%s">' % k
+                print('\t\t<attribute id="%s">' % k, file=f)
                 values = item[k]
                 if not isinstance(values, list):
                     values = [values]
                 for value in values:
-                    print >> f, '\t\t\t<value><![CDATA[',
-                    if isinstance(value, unicode):
+                    print('\t\t\t<value><![CDATA[', end=' ', file=f)
+                    if isinstance(value, six.text_type):
                         value = value.encode('utf-8', 'replace')
                     else:
-                        value = unicode(value, errors='replace').encode('utf-8', 'replace')
+                        value = six.text_type(value, errors='replace').encode('utf-8', 'replace')
                     f.write(value)
-                    print >> f, ']]></value>'
-                print >> f, '\t\t</attribute>'
-            print >> f, '\t</Item>'
-        print >> f, '</Domain>'
+                    print(']]></value>', file=f)
+                print('\t\t</attribute>', file=f)
+            print('\t</Item>', file=f)
+            print('</Domain>', file=f)
f.flush()
f.seek(0)
return f
@@ -370,8 +373,8 @@ class UploaderThread(Thread):
try:
self.db.batch_put_attributes(self.items)
except:
- print "Exception using batch put, trying regular put instead"
+ print("Exception using batch put, trying regular put instead")
for item_name in self.items:
self.db.put_attributes(item_name, self.items[item_name])
- print ".",
+ print(".", end=' ')
sys.stdout.flush()
diff --git a/awx/lib/site-packages/boto/sdb/item.py b/awx/lib/site-packages/boto/sdb/item.py
index a742d80ca8..e09a9d9a2c 100644
--- a/awx/lib/site-packages/boto/sdb/item.py
+++ b/awx/lib/site-packages/boto/sdb/item.py
@@ -27,10 +27,10 @@ class Item(dict):
SimpleDB item. An item in SDB is similar to a row in a relational
database. Items belong to a :py:class:`Domain `,
which is similar to a table in a relational database.
-
+
The keys on instances of this object correspond to attributes that are
- stored on the SDB item.
-
+ stored on the SDB item.
+
.. tip:: While it is possible to instantiate this class directly, you may
want to use the convenience methods on :py:class:`boto.sdb.domain.Domain`
for that purpose. For example, :py:meth:`boto.sdb.domain.Domain.get_item`.
@@ -39,9 +39,9 @@ class Item(dict):
"""
:type domain: :py:class:`boto.sdb.domain.Domain`
:param domain: The domain that this item belongs to.
-
+
:param str name: The name of this item. This name will be used when
- querying for items using methods like
+ querying for items using methods like
:py:meth:`boto.sdb.domain.Domain.get_item`
"""
dict.__init__(self)
@@ -102,8 +102,8 @@ class Item(dict):
def load(self):
"""
Loads or re-loads this item's attributes from SDB.
-
- .. warning::
+
+ .. warning::
If you have changed attribute values on an Item instance,
this method will over-write the values if they are different in
SDB. For any local attributes that don't yet exist in SDB,
@@ -114,7 +114,7 @@ class Item(dict):
def save(self, replace=True):
"""
Saves this item to SDB.
-
+
:param bool replace: If ``True``, delete any attributes on the remote
SDB item that have a ``None`` value on this object.
"""
@@ -134,11 +134,11 @@ class Item(dict):
attribute that has yet to be set, it will simply create an attribute
named ``key`` with your given ``value`` as its value. If you are
adding a value to an existing attribute, this method will convert the
- attribute to a list (if it isn't already) and append your new value
+ attribute to a list (if it isn't already) and append your new value
to said list.
-
+
For clarification, consider the following interactive session:
-
+
.. code-block:: python
>>> item = some_domain.get_item('some_item')
@@ -150,9 +150,9 @@ class Item(dict):
>>> item.add_value('some_attr', 2)
>>> item['some_attr']
[1, 2]
-
+
:param str key: The attribute to add a value to.
- :param object value: The value to set or append to the attribute.
+ :param object value: The value to set or append to the attribute.
"""
if key in self:
# We already have this key on the item.
@@ -170,12 +170,8 @@ class Item(dict):
def delete(self):
"""
Deletes this item in SDB.
-
+
.. note:: This local Python object remains in its current state
after deletion, this only deletes the remote item in SDB.
"""
self.domain.delete_item(self)
-
-
-
-
diff --git a/awx/lib/site-packages/boto/sdb/queryresultset.py b/awx/lib/site-packages/boto/sdb/queryresultset.py
index 9ff0ae2f56..54f35238b6 100644
--- a/awx/lib/site-packages/boto/sdb/queryresultset.py
+++ b/awx/lib/site-packages/boto/sdb/queryresultset.py
@@ -1,3 +1,4 @@
+from boto.compat import six
# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
@@ -89,4 +90,4 @@ class SelectResultSet(object):
more_results = self.next_token is not None
def next(self):
- return self.__iter__().next()
+ return next(self.__iter__())
diff --git a/awx/lib/site-packages/boto/services/bs.py b/awx/lib/site-packages/boto/services/bs.py
index 3d700315db..396c483975 100644
--- a/awx/lib/site-packages/boto/services/bs.py
+++ b/awx/lib/site-packages/boto/services/bs.py
@@ -24,7 +24,8 @@ from boto.services.servicedef import ServiceDef
from boto.services.submit import Submitter
from boto.services.result import ResultProcessor
import boto
-import sys, os, StringIO
+import sys, os
+from boto.compat import StringIO
class BS(object):
@@ -62,32 +63,32 @@ class BS(object):
help="batch identifier required by the retrieve command")
def print_command_help(self):
- print '\nCommands:'
+ print('\nCommands:')
for key in self.Commands.keys():
- print ' %s\t\t%s' % (key, self.Commands[key])
+ print(' %s\t\t%s' % (key, self.Commands[key]))
def do_reset(self):
iq = self.sd.get_obj('input_queue')
if iq:
- print 'clearing out input queue'
+ print('clearing out input queue')
i = 0
m = iq.read()
while m:
i += 1
iq.delete_message(m)
m = iq.read()
- print 'deleted %d messages' % i
+ print('deleted %d messages' % i)
ob = self.sd.get_obj('output_bucket')
ib = self.sd.get_obj('input_bucket')
if ob:
if ib and ob.name == ib.name:
return
- print 'delete generated files in output bucket'
+ print('delete generated files in output bucket')
i = 0
for k in ob:
i += 1
k.delete()
- print 'deleted %d keys' % i
+ print('deleted %d keys' % i)
def do_submit(self):
if not self.options.path:
@@ -97,8 +98,8 @@ class BS(object):
s = Submitter(self.sd)
t = s.submit_path(self.options.path, None, self.options.ignore, None,
None, True, self.options.path)
- print 'A total of %d files were submitted' % t[1]
- print 'Batch Identifier: %s' % t[0]
+ print('A total of %d files were submitted' % t[1])
+ print('Batch Identifier: %s' % t[0])
def do_start(self):
ami_id = self.sd.get('ami_id')
@@ -111,7 +112,7 @@ class BS(object):
self.sd.add_section('Credentials')
self.sd.set('Credentials', 'aws_access_key_id', ec2.aws_access_key_id)
self.sd.set('Credentials', 'aws_secret_access_key', ec2.aws_secret_access_key)
- s = StringIO.StringIO()
+ s = StringIO()
self.sd.write(s)
rs = ec2.get_all_images([ami_id])
img = rs[0]
@@ -119,15 +120,15 @@ class BS(object):
max_count=self.options.num_instances,
instance_type=instance_type,
security_groups=[security_group])
- print 'Starting AMI: %s' % ami_id
- print 'Reservation %s contains the following instances:' % r.id
+ print('Starting AMI: %s' % ami_id)
+ print('Reservation %s contains the following instances:' % r.id)
for i in r.instances:
- print '\t%s' % i.id
+ print('\t%s' % i.id)
def do_status(self):
iq = self.sd.get_obj('input_queue')
if iq:
- print 'The input_queue (%s) contains approximately %s messages' % (iq.id, iq.count())
+ print('The input_queue (%s) contains approximately %s messages' % (iq.id, iq.count()))
ob = self.sd.get_obj('output_bucket')
ib = self.sd.get_obj('input_bucket')
if ob:
@@ -136,7 +137,7 @@ class BS(object):
total = 0
for k in ob:
total += 1
- print 'The output_bucket (%s) contains %d keys' % (ob.name, total)
+ print('The output_bucket (%s) contains %d keys' % (ob.name, total))
def do_retrieve(self):
if not self.options.path:
@@ -151,10 +152,10 @@ class BS(object):
def do_batches(self):
d = self.sd.get_obj('output_domain')
if d:
- print 'Available Batches:'
+ print('Available Batches:')
rs = d.query("['type'='Batch']")
for item in rs:
- print ' %s' % item.name
+ print(' %s' % item.name)
else:
self.parser.error('No output_domain specified for service')
diff --git a/awx/lib/site-packages/boto/services/result.py b/awx/lib/site-packages/boto/services/result.py
index 5f6d800d91..879934323b 100644
--- a/awx/lib/site-packages/boto/services/result.py
+++ b/awx/lib/site-packages/boto/services/result.py
@@ -19,7 +19,6 @@
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-
import os
from datetime import datetime, timedelta
from boto.utils import parse_ts
@@ -84,7 +83,7 @@ class ResultProcessor(object):
key_name = output.split(';')[0]
key = bucket.lookup(key_name)
file_name = os.path.join(path, key_name)
- print 'retrieving file: %s to %s' % (key_name, file_name)
+ print('retrieving file: %s to %s' % (key_name, file_name))
key.get_contents_to_filename(file_name)
self.num_files += 1
@@ -105,10 +104,10 @@ class ResultProcessor(object):
def get_results_from_bucket(self, path):
bucket = self.sd.get_obj('output_bucket')
if bucket:
- print 'No output queue or domain, just retrieving files from output_bucket'
+ print('No output queue or domain, just retrieving files from output_bucket')
for key in bucket:
file_name = os.path.join(path, key)
- print 'retrieving file: %s to %s' % (key, file_name)
+ print('retrieving file: %s to %s' % (key, file_name))
key.get_contents_to_filename(file_name)
self.num_files + 1
@@ -123,14 +122,14 @@ class ResultProcessor(object):
self.get_results_from_bucket(path)
if self.log_fp:
self.log_fp.close()
- print '%d results successfully retrieved.' % self.num_files
+ print('%d results successfully retrieved.' % self.num_files)
if self.num_files > 0:
self.avg_time = float(self.total_time)/self.num_files
- print 'Minimum Processing Time: %d' % self.min_time.seconds
- print 'Maximum Processing Time: %d' % self.max_time.seconds
- print 'Average Processing Time: %f' % self.avg_time
+ print('Minimum Processing Time: %d' % self.min_time.seconds)
+ print('Maximum Processing Time: %d' % self.max_time.seconds)
+ print('Average Processing Time: %f' % self.avg_time)
self.elapsed_time = self.latest_time-self.earliest_time
- print 'Elapsed Time: %d' % self.elapsed_time.seconds
+ print('Elapsed Time: %d' % self.elapsed_time.seconds)
tput = 1.0 / ((self.elapsed_time.seconds/60.0) / self.num_files)
- print 'Throughput: %f transactions / minute' % tput
+ print('Throughput: %f transactions / minute' % tput)
diff --git a/awx/lib/site-packages/boto/services/submit.py b/awx/lib/site-packages/boto/services/submit.py
index 2bc72241c3..69be236adb 100644
--- a/awx/lib/site-packages/boto/services/submit.py
+++ b/awx/lib/site-packages/boto/services/submit.py
@@ -18,7 +18,6 @@
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-
import time
import os
@@ -77,12 +76,12 @@ class Submitter(object):
for file in files:
fullpath = os.path.join(root, file)
if status:
- print 'Submitting %s' % fullpath
+ print('Submitting %s' % fullpath)
self.submit_file(fullpath, metadata, cb, num_cb, prefix)
total += 1
elif os.path.isfile(path):
self.submit_file(path, metadata, cb, num_cb)
total += 1
else:
- print 'problem with %s' % path
+ print('problem with %s' % path)
return (metadata['Batch'], total)
diff --git a/awx/lib/site-packages/boto/ses/__init__.py b/awx/lib/site-packages/boto/ses/__init__.py
index 81d4206d79..ee32451699 100644
--- a/awx/lib/site-packages/boto/ses/__init__.py
+++ b/awx/lib/site-packages/boto/ses/__init__.py
@@ -20,7 +20,7 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-from connection import SESConnection
+from boto.ses.connection import SESConnection
from boto.regioninfo import RegionInfo, get_regions
diff --git a/awx/lib/site-packages/boto/ses/connection.py b/awx/lib/site-packages/boto/ses/connection.py
index df115232d8..ed69ad29b5 100644
--- a/awx/lib/site-packages/boto/ses/connection.py
+++ b/awx/lib/site-packages/boto/ses/connection.py
@@ -20,9 +20,9 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import re
-import urllib
import base64
+from boto.compat import six, urllib
from boto.connection import AWSAuthConnection
from boto.exception import BotoServerError
from boto.regioninfo import RegionInfo
@@ -71,7 +71,7 @@ class SESConnection(AWSAuthConnection):
:type label: string
:param label: The parameter list's name
"""
- if isinstance(items, basestring):
+ if isinstance(items, six.string_types):
items = [items]
for i in range(1, len(items) + 1):
params['%s.%d' % (label, i)] = items[i - 1]
@@ -92,16 +92,16 @@ class SESConnection(AWSAuthConnection):
params['Action'] = action
for k, v in params.items():
- if isinstance(v, unicode): # UTF-8 encode only if it's Unicode
+ if isinstance(v, six.text_type): # UTF-8 encode only if it's Unicode
params[k] = v.encode('utf-8')
response = super(SESConnection, self).make_request(
'POST',
'/',
headers=headers,
- data=urllib.urlencode(params)
+ data=urllib.parse.urlencode(params)
)
- body = response.read()
+ body = response.read().decode('utf-8')
if response.status == 200:
list_markers = ('VerifiedEmailAddresses', 'Identities',
'DkimTokens', 'VerificationAttributes',
@@ -306,7 +306,7 @@ class SESConnection(AWSAuthConnection):
"""
- if isinstance(raw_message, unicode):
+ if isinstance(raw_message, six.text_type):
raw_message = raw_message.encode('utf-8')
params = {
@@ -521,3 +521,46 @@ class SESConnection(AWSAuthConnection):
return self._make_request('DeleteIdentity', {
'Identity': identity,
})
+
+ def set_identity_notification_topic(self, identity, notification_type, sns_topic=None):
+ """Sets an SNS topic to publish bounce or complaint notifications for
+ emails sent with the given identity as the Source. Publishing to topics
+ may only be disabled when feedback forwarding is enabled.
+
+ :type identity: string
+ :param identity: An email address or domain name.
+
+ :type notification_type: string
+ :param notification_type: The type of feedback notifications that will
+ be published to the specified topic.
+ Valid Values: Bounce | Complaint | Delivery
+
+ :type sns_topic: string or None
+ :param sns_topic: The Amazon Resource Name (ARN) of the Amazon Simple
+ Notification Service (Amazon SNS) topic.
+ """
+ params = {
+ 'Identity': identity,
+ 'NotificationType': notification_type
+ }
+ if sns_topic:
+ params['SnsTopic'] = sns_topic
+ return self._make_request('SetIdentityNotificationTopic', params)
+
+ def set_identity_feedback_forwarding_enabled(self, identity, forwarding_enabled=True):
+ """
+ Enables or disables SES feedback notification via email.
+ Feedback forwarding may only be disabled when both complaint and
+ bounce topics are set.
+
+ :type identity: string
+ :param identity: An email address or domain name.
+
+ :type forwarding_enabled: bool
+ :param forwarding_enabled: Specifies whether or not to enable feedback forwarding.
+ """
+ return self._make_request('SetIdentityFeedbackForwardingEnabled', {
+ 'Identity': identity,
+ 'ForwardingEnabled': 'true' if forwarding_enabled else 'false'
+ })
+
diff --git a/awx/lib/site-packages/boto/sns/__init__.py b/awx/lib/site-packages/boto/sns/__init__.py
index 1517f5f18a..2fb882cc41 100644
--- a/awx/lib/site-packages/boto/sns/__init__.py
+++ b/awx/lib/site-packages/boto/sns/__init__.py
@@ -22,7 +22,7 @@
# this is here for backward compatibility
# originally, the SNSConnection class was defined here
-from connection import SNSConnection
+from boto.sns.connection import SNSConnection
from boto.regioninfo import RegionInfo, get_regions
diff --git a/awx/lib/site-packages/boto/sns/connection.py b/awx/lib/site-packages/boto/sns/connection.py
index c98793f5bd..5a6da205f4 100644
--- a/awx/lib/site-packages/boto/sns/connection.py
+++ b/awx/lib/site-packages/boto/sns/connection.py
@@ -96,7 +96,7 @@ class SNSConnection(AWSQueryConnection):
:param name: name of the serialized parameter
"""
items = sorted(dictionary.items(), key=lambda x:x[0])
- for kv, index in zip(items, range(1, len(items)+1)):
+ for kv, index in zip(items, list(range(1, len(items)+1))):
key, value = kv
prefix = '%s.entry.%s' % (name, index)
params['%s.key' % prefix] = key
@@ -214,7 +214,7 @@ class SNSConnection(AWSQueryConnection):
return self._make_request('DeleteTopic', params, '/', 'GET')
def publish(self, topic=None, message=None, subject=None, target_arn=None,
- message_structure=None):
+ message_structure=None, message_attributes=None):
"""
Get properties of a Topic
@@ -233,6 +233,23 @@ class SNSConnection(AWSQueryConnection):
matches the structure described at
http://docs.aws.amazon.com/sns/latest/dg/PublishTopic.html#sns-message-formatting-by-protocol
+ :type message_attributes: dict
+ :param message_attributes: Message attributes to set. Should be
+ of the form:
+
+ .. code-block:: python
+
+ {
+ "name1": {
+ "data_type": "Number",
+ "string_value": "42"
+ },
+ "name2": {
+ "data_type": "String",
+ "string_value": "Bob"
+ }
+ }
+
:type subject: string
:param subject: Optional parameter to be used as the "Subject"
line of the email notifications.
@@ -256,6 +273,20 @@ class SNSConnection(AWSQueryConnection):
params['TargetArn'] = target_arn
if message_structure is not None:
params['MessageStructure'] = message_structure
+ if message_attributes is not None:
+ keys = sorted(message_attributes.keys())
+ for i, name in enumerate(keys, start=1):
+ attribute = message_attributes[name]
+ params['MessageAttributes.entry.{0}.Name'.format(i)] = name
+ if 'data_type' in attribute:
+ params['MessageAttributes.entry.{0}.Value.DataType'.format(i)] = \
+ attribute['data_type']
+ if 'string_value' in attribute:
+ params['MessageAttributes.entry.{0}.Value.StringValue'.format(i)] = \
+ attribute['string_value']
+ if 'binary_value' in attribute:
+ params['MessageAttributes.entry.{0}.Value.BinaryValue'.format(i)] = \
+ attribute['binary_value']
return self._make_request('Publish', params, '/', 'POST')
def subscribe(self, topic, protocol, endpoint):
@@ -313,7 +344,7 @@ class SNSConnection(AWSQueryConnection):
"""
t = queue.id.split('/')
q_arn = queue.arn
- sid = hashlib.md5(topic + q_arn).hexdigest()
+ sid = hashlib.md5((topic + q_arn).encode('utf-8')).hexdigest()
sid_exists = False
resp = self.subscribe(topic, 'sqs', q_arn)
attr = queue.get_attributes('Policy')
@@ -724,7 +755,7 @@ class SNSConnection(AWSQueryConnection):
params['ContentType'] = 'JSON'
response = self.make_request(action=action, verb=verb,
path=path, params=params)
- body = response.read()
+ body = response.read().decode('utf-8')
boto.log.debug(body)
if response.status == 200:
return json.loads(body)
diff --git a/awx/lib/site-packages/boto/sqs/__init__.py b/awx/lib/site-packages/boto/sqs/__init__.py
index 526a34cc73..29bd3cfdf1 100644
--- a/awx/lib/site-packages/boto/sqs/__init__.py
+++ b/awx/lib/site-packages/boto/sqs/__init__.py
@@ -20,7 +20,7 @@
# IN THE SOFTWARE.
#
-from regioninfo import SQSRegionInfo
+from boto.sqs.regioninfo import SQSRegionInfo
from boto.regioninfo import get_regions
diff --git a/awx/lib/site-packages/boto/sqs/connection.py b/awx/lib/site-packages/boto/sqs/connection.py
index 8fc69306bf..f4341e3297 100644
--- a/awx/lib/site-packages/boto/sqs/connection.py
+++ b/awx/lib/site-packages/boto/sqs/connection.py
@@ -122,12 +122,18 @@ class SQSConnection(AWSQueryConnection):
supplied, the default is to return all attributes. Valid
attributes are:
+ * All
* ApproximateNumberOfMessages
* ApproximateNumberOfMessagesNotVisible
* VisibilityTimeout
* CreatedTimestamp
* LastModifiedTimestamp
* Policy
+ * MaximumMessageSize
+ * MessageRetentionPeriod
+ * QueueArn
+ * ApproximateNumberOfMessagesDelayed
+ * DelaySeconds
* ReceiveMessageWaitTimeSeconds
* RedrivePolicy
@@ -144,7 +150,7 @@ class SQSConnection(AWSQueryConnection):
def receive_message(self, queue, number_messages=1,
visibility_timeout=None, attributes=None,
- wait_time_seconds=None):
+ wait_time_seconds=None, message_attributes=None):
"""
Read messages from an SQS Queue.
@@ -177,6 +183,11 @@ class SQSConnection(AWSQueryConnection):
If a message is available, the call will return sooner than
wait_time_seconds.
+ :type message_attributes: list
+ :param message_attributes: The name(s) of additional message
+ attributes to return. The default is to return no additional
+ message attributes. Use ``['All']`` or ``['.*']`` to return all.
+
:rtype: list
:return: A list of :class:`boto.sqs.message.Message` objects.
@@ -188,6 +199,9 @@ class SQSConnection(AWSQueryConnection):
self.build_list_params(params, attributes, 'AttributeName')
if wait_time_seconds is not None:
params['WaitTimeSeconds'] = wait_time_seconds
+ if message_attributes is not None:
+ self.build_list_params(params, message_attributes,
+ 'MessageAttributeName')
return self.get_list('ReceiveMessage', params,
[('Message', queue.message_class)],
queue.id, queue)
@@ -244,10 +258,62 @@ class SQSConnection(AWSQueryConnection):
params = {'ReceiptHandle' : receipt_handle}
return self.get_status('DeleteMessage', params, queue.id)
- def send_message(self, queue, message_content, delay_seconds=None):
+ def send_message(self, queue, message_content, delay_seconds=None,
+ message_attributes=None):
+ """
+ Send a new message to the queue.
+
+ :type queue: A :class:`boto.sqs.queue.Queue` object.
+ :param queue: The Queue to which the messages will be written.
+
+ :type message_content: string
+ :param message_content: The body of the message
+
+ :type delay_seconds: int
+ :param delay_seconds: Number of seconds (0 - 900) to delay this
+ message from being processed.
+
+ :type message_attributes: dict
+ :param message_attributes: Message attributes to set. Should be
+ of the form:
+
+ {
+ "name1": {
+ "data_type": "Number",
+ "string_value": "1"
+ },
+ "name2": {
+ "data_type": "String",
+ "string_value": "Bob"
+ }
+ }
+
+ """
params = {'MessageBody' : message_content}
if delay_seconds:
params['DelaySeconds'] = int(delay_seconds)
+
+ if message_attributes is not None:
+ keys = sorted(message_attributes.keys())
+ for i, name in enumerate(keys, start=1):
+ attribute = message_attributes[name]
+ params['MessageAttribute.%s.Name' % i] = name
+ if 'data_type' in attribute:
+ params['MessageAttribute.%s.Value.DataType' % i] = \
+ attribute['data_type']
+ if 'string_value' in attribute:
+ params['MessageAttribute.%s.Value.StringValue' % i] = \
+ attribute['string_value']
+ if 'binary_value' in attribute:
+ params['MessageAttribute.%s.Value.BinaryValue' % i] = \
+ attribute['binary_value']
+ if 'string_list_value' in attribute:
+ params['MessageAttribute.%s.Value.StringListValue' % i] = \
+ attribute['string_list_value']
+ if 'binary_list_value' in attribute:
+ params['MessageAttribute.%s.Value.BinaryListValue' % i] = \
+ attribute['binary_list_value']
+
return self.get_object('SendMessage', params, Message,
queue.id, verb='POST')
@@ -263,19 +329,45 @@ class SQSConnection(AWSQueryConnection):
tuple represents a single message to be written
and consists of and ID (string) that must be unique
within the list of messages, the message body itself
- which can be a maximum of 64K in length, and an
+ which can be a maximum of 64K in length, an
integer which represents the delay time (in seconds)
for the message (0-900) before the message will
- be delivered to the queue.
+ be delivered to the queue, and an optional dict of
+ message attributes like those passed to ``send_message``
+ above.
+
"""
params = {}
for i, msg in enumerate(messages):
- p_name = 'SendMessageBatchRequestEntry.%i.Id' % (i+1)
- params[p_name] = msg[0]
- p_name = 'SendMessageBatchRequestEntry.%i.MessageBody' % (i+1)
- params[p_name] = msg[1]
- p_name = 'SendMessageBatchRequestEntry.%i.DelaySeconds' % (i+1)
- params[p_name] = msg[2]
+ base = 'SendMessageBatchRequestEntry.%i' % (i + 1)
+ params['%s.Id' % base] = msg[0]
+ params['%s.MessageBody' % base] = msg[1]
+ params['%s.DelaySeconds' % base] = msg[2]
+ if len(msg) > 3:
+ base += '.MessageAttribute'
+ keys = sorted(msg[3].keys())
+ for j, name in enumerate(keys):
+ attribute = msg[3][name]
+
+ p_name = '%s.%i.Name' % (base, j + 1)
+ params[p_name] = name
+
+ if 'data_type' in attribute:
+ p_name = '%s.%i.DataType' % (base, j + 1)
+ params[p_name] = attribute['data_type']
+ if 'string_value' in attribute:
+ p_name = '%s.%i.StringValue' % (base, j + 1)
+ params[p_name] = attribute['string_value']
+ if 'binary_value' in attribute:
+ p_name = '%s.%i.BinaryValue' % (base, j + 1)
+ params[p_name] = attribute['binary_value']
+ if 'string_list_value' in attribute:
+ p_name = '%s.%i.StringListValue' % (base, j + 1)
+ params[p_name] = attribute['string_list_value']
+ if 'binary_list_value' in attribute:
+ p_name = '%s.%i.BinaryListValue' % (base, j + 1)
+ params[p_name] = attribute['binary_list_value']
+
return self.get_object('SendMessageBatch', params, BatchResults,
queue.id, verb='POST')
diff --git a/awx/lib/site-packages/boto/sqs/jsonmessage.py b/awx/lib/site-packages/boto/sqs/jsonmessage.py
index 0eb3a13621..520eb8eb06 100644
--- a/awx/lib/site-packages/boto/sqs/jsonmessage.py
+++ b/awx/lib/site-packages/boto/sqs/jsonmessage.py
@@ -32,7 +32,7 @@ class JSONMessage(MHMessage):
def decode(self, value):
try:
- value = base64.b64decode(value)
+ value = base64.b64decode(value.encode('utf-8')).decode('utf-8')
value = json.loads(value)
except:
raise SQSDecodeError('Unable to decode message', self)
@@ -40,4 +40,4 @@ class JSONMessage(MHMessage):
def encode(self, value):
value = json.dumps(value)
- return base64.b64encode(value)
+ return base64.b64encode(value.encode('utf-8')).decode('utf-8')
diff --git a/awx/lib/site-packages/boto/sqs/message.py b/awx/lib/site-packages/boto/sqs/message.py
index ce7976c1a8..04d7cec74e 100644
--- a/awx/lib/site-packages/boto/sqs/message.py
+++ b/awx/lib/site-packages/boto/sqs/message.py
@@ -64,11 +64,14 @@ in the format in which it would be stored in SQS.
"""
import base64
-import StringIO
-from boto.sqs.attributes import Attributes
-from boto.exception import SQSDecodeError
+
import boto
+from boto.compat import StringIO
+from boto.sqs.attributes import Attributes
+from boto.sqs.messageattributes import MessageAttributes
+from boto.exception import SQSDecodeError
+
class RawMessage(object):
"""
Base class for SQS messages. RawMessage does not encode the message
@@ -84,6 +87,8 @@ class RawMessage(object):
self.receipt_handle = None
self.md5 = None
self.attributes = Attributes(self)
+ self.message_attributes = MessageAttributes(self)
+ self.md5_message_attributes = None
def __len__(self):
return len(self.encode(self._body))
@@ -91,6 +96,8 @@ class RawMessage(object):
def startElement(self, name, attrs, connection):
if name == 'Attribute':
return self.attributes
+ if name == 'MessageAttribute':
+ return self.message_attributes
return None
def endElement(self, name, value, connection):
@@ -100,8 +107,10 @@ class RawMessage(object):
self.id = value
elif name == 'ReceiptHandle':
self.receipt_handle = value
- elif name == 'MD5OfMessageBody':
+ elif name == 'MD5OfBody':
self.md5 = value
+ elif name == 'MD5OfMessageAttributes':
+ self.md5_message_attributes = value
else:
setattr(self, name, value)
@@ -154,11 +163,11 @@ class Message(RawMessage):
"""
def encode(self, value):
- return base64.b64encode(value)
+ return base64.b64encode(value.encode('utf-8')).decode('utf-8')
def decode(self, value):
try:
- value = base64.b64decode(value)
+ value = base64.b64decode(value.encode('utf-8')).decode('utf-8')
except:
boto.log.warning('Unable to decode message')
return value
@@ -184,7 +193,7 @@ class MHMessage(Message):
def decode(self, value):
try:
msg = {}
- fp = StringIO.StringIO(value)
+ fp = StringIO(value)
line = fp.readline()
while line:
delim = line.find(':')
@@ -248,12 +257,12 @@ class EncodedMHMessage(MHMessage):
def decode(self, value):
try:
- value = base64.b64decode(value)
+ value = base64.b64decode(value.encode('utf-8')).decode('utf-8')
except:
raise SQSDecodeError('Unable to decode message', self)
return super(EncodedMHMessage, self).decode(value)
def encode(self, value):
value = super(EncodedMHMessage, self).encode(value)
- return base64.b64encode(value)
+ return base64.b64encode(value.encode('utf-8')).decode('utf-8')
diff --git a/awx/lib/site-packages/boto/sqs/messageattributes.py b/awx/lib/site-packages/boto/sqs/messageattributes.py
new file mode 100644
index 0000000000..7e61bf3668
--- /dev/null
+++ b/awx/lib/site-packages/boto/sqs/messageattributes.py
@@ -0,0 +1,66 @@
+# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
+# Copyright (c) 2014 Amazon.com, Inc. All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+
+"""
+Represents an SQS MessageAttribute Name/Value set
+"""
+
+class MessageAttributes(dict):
+ def __init__(self, parent):
+ self.parent = parent
+ self.current_key = None
+ self.current_value = None
+
+ def startElement(self, name, attrs, connection):
+ if name == 'Value':
+ self.current_value = MessageAttributeValue(self)
+ return self.current_value
+
+ def endElement(self, name, value, connection):
+ if name == 'MessageAttribute':
+ self[self.current_key] = self.current_value
+ elif name == 'Name':
+ self.current_key = value
+ elif name == 'Value':
+ pass
+ else:
+ setattr(self, name, value)
+
+
+class MessageAttributeValue(dict):
+ def __init__(self, parent):
+ self.parent = parent
+
+ def startElement(self, name, attrs, connection):
+ pass
+
+ def endElement(self, name, value, connection):
+ if name == 'DataType':
+ self['data_type'] = value
+ elif name == 'StringValue':
+ self['string_value'] = value
+ elif name == 'BinaryValue':
+ self['binary_value'] = value
+ elif name == 'StringListValue':
+ self['string_list_value'] = value
+ elif name == 'BinaryListValue':
+ self['binary_list_value'] = value
diff --git a/awx/lib/site-packages/boto/sqs/queue.py b/awx/lib/site-packages/boto/sqs/queue.py
index 054b839e33..6bcae48977 100644
--- a/awx/lib/site-packages/boto/sqs/queue.py
+++ b/awx/lib/site-packages/boto/sqs/queue.py
@@ -22,8 +22,7 @@
"""
Represents an SQS Queue
"""
-
-import urlparse
+from boto.compat import urllib
from boto.sqs.message import Message
@@ -40,7 +39,7 @@ class Queue(object):
def _id(self):
if self.url:
- val = urlparse.urlparse(self.url)[2]
+ val = urllib.parse.urlparse(self.url)[2]
else:
val = self.url
return val
@@ -48,7 +47,7 @@ class Queue(object):
def _name(self):
if self.url:
- val = urlparse.urlparse(self.url)[2].split('/')[2]
+ val = urllib.parse.urlparse(self.url)[2].split('/')[2]
else:
val = self.url
return val
@@ -182,7 +181,8 @@ class Queue(object):
"""
return self.connection.remove_permission(self, label)
- def read(self, visibility_timeout=None, wait_time_seconds=None):
+ def read(self, visibility_timeout=None, wait_time_seconds=None,
+ message_attributes=None):
"""
Read a single message from the queue.
@@ -195,11 +195,17 @@ class Queue(object):
If a message is available, the call will return sooner than
wait_time_seconds.
+ :type message_attributes: list
+ :param message_attributes: The name(s) of additional message
+ attributes to return. The default is to return no additional
+ message attributes. Use ``['All']`` or ``['.*']`` to return all.
+
:rtype: :class:`boto.sqs.message.Message`
:return: A single message or None if queue is empty
"""
rs = self.get_messages(1, visibility_timeout,
- wait_time_seconds=wait_time_seconds)
+ wait_time_seconds=wait_time_seconds,
+ message_attributes=message_attributes)
if len(rs) == 1:
return rs[0]
else:
@@ -216,8 +222,8 @@ class Queue(object):
:return: The :class:`boto.sqs.message.Message` object that was written.
"""
new_msg = self.connection.send_message(self,
- message.get_body_encoded(),
- delay_seconds)
+ message.get_body_encoded(), delay_seconds=delay_seconds,
+ message_attributes=message.message_attributes)
message.id = new_msg.id
message.md5 = new_msg.md5
return message
@@ -231,10 +237,12 @@ class Queue(object):
tuple represents a single message to be written
and consists of and ID (string) that must be unique
within the list of messages, the message body itself
- which can be a maximum of 64K in length, and an
+ which can be a maximum of 64K in length, an
integer which represents the delay time (in seconds)
for the message (0-900) before the message will
- be delivered to the queue.
+ be delivered to the queue, and an optional dict of
+ message attributes like those passed to ``send_message``
+ in the connection class.
"""
return self.connection.send_message_batch(self, messages)
@@ -254,7 +262,8 @@ class Queue(object):
# get a variable number of messages, returns a list of messages
def get_messages(self, num_messages=1, visibility_timeout=None,
- attributes=None, wait_time_seconds=None):
+ attributes=None, wait_time_seconds=None,
+ message_attributes=None):
"""
Get a variable number of messages.
@@ -278,13 +287,19 @@ class Queue(object):
If a message is available, the call will return sooner than
wait_time_seconds.
+ :type message_attributes: list
+ :param message_attributes: The name(s) of additional message
+ attributes to return. The default is to return no additional
+ message attributes. Use ``['All']`` or ``['.*']`` to return all.
+
:rtype: list
:return: A list of :class:`boto.sqs.message.Message` objects.
"""
return self.connection.receive_message(
self, number_messages=num_messages,
visibility_timeout=visibility_timeout, attributes=attributes,
- wait_time_seconds=wait_time_seconds)
+ wait_time_seconds=wait_time_seconds,
+ message_attributes=message_attributes)
def delete_message(self, message):
"""
@@ -459,7 +474,7 @@ class Queue(object):
m = Message(self, body)
self.write(m)
n += 1
- print 'writing message %d' % n
+ print('writing message %d' % n)
body = ''
else:
body = body + l
diff --git a/awx/lib/site-packages/boto/sts/__init__.py b/awx/lib/site-packages/boto/sts/__init__.py
index a130b7fed9..156975ecd1 100644
--- a/awx/lib/site-packages/boto/sts/__init__.py
+++ b/awx/lib/site-packages/boto/sts/__init__.py
@@ -20,7 +20,7 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-from connection import STSConnection
+from boto.sts.connection import STSConnection
from boto.regioninfo import RegionInfo, get_regions
diff --git a/awx/lib/site-packages/boto/sts/connection.py b/awx/lib/site-packages/boto/sts/connection.py
index 4672c7c88f..59526222f0 100644
--- a/awx/lib/site-packages/boto/sts/connection.py
+++ b/awx/lib/site-packages/boto/sts/connection.py
@@ -23,8 +23,8 @@
from boto.connection import AWSQueryConnection
from boto.regioninfo import RegionInfo
-from credentials import Credentials, FederationToken, AssumedRole
-from credentials import DecodeAuthorizationMessage
+from boto.sts.credentials import Credentials, FederationToken, AssumedRole
+from boto.sts.credentials import DecodeAuthorizationMessage
import boto
import boto.utils
import datetime
diff --git a/awx/lib/site-packages/boto/sts/credentials.py b/awx/lib/site-packages/boto/sts/credentials.py
index 21828db77c..7ab631942c 100644
--- a/awx/lib/site-packages/boto/sts/credentials.py
+++ b/awx/lib/site-packages/boto/sts/credentials.py
@@ -113,10 +113,10 @@ class Credentials(object):
the credentials contained in the file, the permissions
of the file will be set to readable/writable by owner only.
"""
- fp = open(file_path, 'wb')
+ fp = open(file_path, 'w')
json.dump(self.to_dict(), fp)
fp.close()
- os.chmod(file_path, 0600)
+ os.chmod(file_path, 0o600)
def is_expired(self, time_offset_seconds=0):
"""
diff --git a/awx/lib/site-packages/boto/support/layer1.py b/awx/lib/site-packages/boto/support/layer1.py
index c4e18da030..b1c3ea016b 100644
--- a/awx/lib/site-packages/boto/support/layer1.py
+++ b/awx/lib/site-packages/boto/support/layer1.py
@@ -20,16 +20,12 @@
# IN THE SOFTWARE.
#
-try:
- import json
-except ImportError:
- import simplejson as json
-
import boto
from boto.connection import AWSQueryConnection
from boto.regioninfo import RegionInfo
from boto.exception import JSONResponseError
from boto.support import exceptions
+from boto.compat import json
class SupportConnection(AWSQueryConnection):
@@ -577,7 +573,7 @@ class SupportConnection(AWSQueryConnection):
headers=headers, data=body)
response = self._mexe(http_request, sender=None,
override_num_retries=10)
- response_body = response.read()
+ response_body = response.read().decode('utf-8')
boto.log.debug(response_body)
if response.status == 200:
if response_body:
@@ -588,4 +584,3 @@ class SupportConnection(AWSQueryConnection):
exception_class = self._faults.get(fault_name, self.ResponseError)
raise exception_class(response.status, response.reason,
body=json_body)
-
diff --git a/awx/lib/site-packages/boto/swf/layer1.py b/awx/lib/site-packages/boto/swf/layer1.py
index 73a809b2b2..bba16ad270 100644
--- a/awx/lib/site-packages/boto/swf/layer1.py
+++ b/awx/lib/site-packages/boto/swf/layer1.py
@@ -96,7 +96,7 @@ class Layer1(AWSAuthConnection):
:type data: dict
:param data: Specifies request parameters with default values to be removed.
"""
- for item in data.keys():
+ for item in list(data.keys()):
if isinstance(data[item], dict):
cls._normalize_request_dict(data[item])
if data[item] in (None, {}):
@@ -130,7 +130,7 @@ class Layer1(AWSAuthConnection):
{}, headers, body, None)
response = self._mexe(http_request, sender=None,
override_num_retries=10)
- response_body = response.read()
+ response_body = response.read().decode('utf-8')
boto.log.debug(response_body)
if response.status == 200:
if response_body:
diff --git a/awx/lib/site-packages/boto/swf/layer1_decisions.py b/awx/lib/site-packages/boto/swf/layer1_decisions.py
index 7649da1729..3f5f74af40 100644
--- a/awx/lib/site-packages/boto/swf/layer1_decisions.py
+++ b/awx/lib/site-packages/boto/swf/layer1_decisions.py
@@ -167,7 +167,7 @@ class Layer1Decisions(object):
if task_list is not None:
attrs['taskList'] = {'name': task_list}
if start_to_close_timeout is not None:
- attrs['startToCloseTimeout'] = start_to_close_timeout
+ attrs['taskStartToCloseTimeout'] = start_to_close_timeout
if workflow_type_version is not None:
attrs['workflowTypeVersion'] = workflow_type_version
self._data.append(o)
diff --git a/awx/lib/site-packages/boto/swf/layer2.py b/awx/lib/site-packages/boto/swf/layer2.py
index fbb9f33d08..5ad1c8d344 100644
--- a/awx/lib/site-packages/boto/swf/layer2.py
+++ b/awx/lib/site-packages/boto/swf/layer2.py
@@ -23,6 +23,7 @@ class SWFBase(object):
domain = None
aws_access_key_id = None
aws_secret_access_key = None
+ region = None
def __init__(self, **kwargs):
# Set default credentials.
@@ -33,8 +34,9 @@ class SWFBase(object):
for kwarg in kwargs:
setattr(self, kwarg, kwargs[kwarg])
- self._swf = Layer1(self.aws_access_key_id,
- self.aws_secret_access_key)
+ self._swf = Layer1(self.aws_access_key_id,
+ self.aws_secret_access_key,
+ region=self.region)
def __repr__(self):
rep_str = str(self.name)
diff --git a/awx/lib/site-packages/boto/utils.py b/awx/lib/site-packages/boto/utils.py
index 18d34f659d..dd5f095629 100644
--- a/awx/lib/site-packages/boto/utils.py
+++ b/awx/lib/site-packages/boto/utils.py
@@ -40,11 +40,8 @@ Some handy utility functions used by several classes.
"""
import socket
-import urllib
-import urllib2
import imp
import subprocess
-import StringIO
import time
import logging.handlers
import boto
@@ -60,19 +57,14 @@ import email.mime.text
import email.utils
import email.encoders
import gzip
-import base64
-try:
- from hashlib import md5
-except ImportError:
- from md5 import md5
+import threading
+import locale
+from boto.compat import six, StringIO, urllib, encodebytes
+from contextlib import contextmanager
-try:
- import hashlib
- _hashfn = hashlib.sha512
-except ImportError:
- import md5
- _hashfn = md5.md5
+from hashlib import md5, sha512
+_hashfn = sha512
from boto.compat import json
@@ -111,7 +103,7 @@ def unquote_v(nv):
if len(nv) == 1:
return nv
else:
- return (nv[0], urllib.unquote(nv[1]))
+ return (nv[0], urllib.parse.unquote(nv[1]))
def canonical_string(method, path, headers, expires=None,
@@ -164,7 +156,7 @@ def canonical_string(method, path, headers, expires=None,
qsa = [a.split('=', 1) for a in qsa]
qsa = [unquote_v(a) for a in qsa if a[0] in qsa_of_interest]
if len(qsa) > 0:
- qsa.sort(cmp=lambda x, y: cmp(x[0], y[0]))
+ qsa.sort(key=lambda x: x[0])
qsa = ['='.join(a) for a in qsa]
buf += '?'
buf += '&'.join(qsa)
@@ -195,11 +187,14 @@ def get_aws_metadata(headers, provider=None):
metadata = {}
for hkey in headers.keys():
if hkey.lower().startswith(metadata_prefix):
- val = urllib.unquote_plus(headers[hkey])
- try:
- metadata[hkey[len(metadata_prefix):]] = unicode(val, 'utf-8')
- except UnicodeDecodeError:
- metadata[hkey[len(metadata_prefix):]] = val
+ val = urllib.parse.unquote(headers[hkey])
+ if isinstance(val, bytes):
+ try:
+ val = val.decode('utf-8')
+ except UnicodeDecodeError:
+ # Just leave the value as-is
+ pass
+ metadata[hkey[len(metadata_prefix):]] = val
del headers[hkey]
return metadata
@@ -213,26 +208,28 @@ def retry_url(url, retry_on_404=True, num_retries=10):
"""
for i in range(0, num_retries):
try:
- proxy_handler = urllib2.ProxyHandler({})
- opener = urllib2.build_opener(proxy_handler)
- req = urllib2.Request(url)
+ proxy_handler = urllib.request.ProxyHandler({})
+ opener = urllib.request.build_opener(proxy_handler)
+ req = urllib.request.Request(url)
r = opener.open(req)
result = r.read()
+
+ if(not isinstance(result, six.string_types) and
+ hasattr(result, 'decode')):
+ result = result.decode('utf-8')
+
return result
- except urllib2.HTTPError, e:
- # in 2.6 you use getcode(), in 2.5 and earlier you use code
- if hasattr(e, 'getcode'):
- code = e.getcode()
- else:
- code = e.code
+ except urllib.error.HTTPError as e:
+ code = e.getcode()
if code == 404 and not retry_on_404:
return ''
- except Exception, e:
+ except Exception as e:
pass
boto.log.exception('Caught exception reading instance data')
# If not on the last iteration of the loop then sleep.
if i + 1 != num_retries:
- time.sleep(2 ** i)
+ time.sleep(min(2 ** i,
+ boto.config.get('Boto', 'max_retry_delay', 60)))
boto.log.error('Unable to read instance data, giving up')
return ''
@@ -280,12 +277,13 @@ class LazyLoadMetadata(dict):
if key in self._leaves:
resource = self._leaves[key]
+ last_exception = None
for i in range(0, self._num_retries):
try:
val = boto.utils.retry_url(
- self._url + urllib.quote(resource,
- safe="/:"),
+ self._url + urllib.parse.quote(resource,
+ safe="/:"),
num_retries=self._num_retries)
if val and val[0] == '{':
val = json.loads(val)
@@ -296,31 +294,35 @@ class LazyLoadMetadata(dict):
val = val.split('\n')
break
- except JSONDecodeError, e:
+ except JSONDecodeError as e:
boto.log.debug(
"encountered '%s' exception: %s" % (
e.__class__.__name__, e))
boto.log.debug(
'corrupted JSON data found: %s' % val)
+ last_exception = e
- except Exception, e:
+ except Exception as e:
boto.log.debug("encountered unretryable" +
" '%s' exception, re-raising" % (
e.__class__.__name__))
+ last_exception = e
raise
boto.log.error("Caught exception reading meta data" +
" for the '%s' try" % (i + 1))
if i + 1 != self._num_retries:
- next_sleep = random.random() * (2 ** i)
+ next_sleep = min(
+ random.random() * 2 ** i,
+ boto.config.get('Boto', 'max_retry_delay', 60))
time.sleep(next_sleep)
else:
boto.log.error('Unable to read meta data, giving up')
boto.log.error(
"encountered '%s' exception: %s" % (
- e.__class__.__name__, e))
- raise
+ last_exception.__class__.__name__, last_exception))
+ raise last_exception
self[key] = val
elif key in self._dicts:
@@ -395,7 +397,7 @@ def get_instance_metadata(version='latest', url='http://169.254.169.254',
try:
metadata_url = _build_instance_metadata_url(url, version, data)
return _get_instance_metadata(metadata_url, num_retries=num_retries)
- except urllib2.URLError, e:
+ except urllib.error.URLError as e:
return None
finally:
if timeout is not None:
@@ -423,7 +425,7 @@ def get_instance_identity(version='latest', url='http://169.254.169.254',
if field:
iid[field] = val
return iid
- except urllib2.URLError, e:
+ except urllib.error.URLError as e:
return None
finally:
if timeout is not None:
@@ -446,7 +448,20 @@ def get_instance_userdata(version='latest', sep=None,
ISO8601 = '%Y-%m-%dT%H:%M:%SZ'
ISO8601_MS = '%Y-%m-%dT%H:%M:%S.%fZ'
RFC1123 = '%a, %d %b %Y %H:%M:%S %Z'
+LOCALE_LOCK = threading.Lock()
+@contextmanager
+def setlocale(name):
+ """
+ A context manager to set the locale in a threadsafe manner.
+ """
+ with LOCALE_LOCK:
+ saved = locale.setlocale(locale.LC_ALL)
+
+ try:
+ yield locale.setlocale(locale.LC_ALL, name)
+ finally:
+ locale.setlocale(locale.LC_ALL, saved)
def get_ts(ts=None):
if not ts:
@@ -455,17 +470,18 @@ def get_ts(ts=None):
def parse_ts(ts):
- ts = ts.strip()
- try:
- dt = datetime.datetime.strptime(ts, ISO8601)
- return dt
- except ValueError:
+ with setlocale('C'):
+ ts = ts.strip()
try:
- dt = datetime.datetime.strptime(ts, ISO8601_MS)
+ dt = datetime.datetime.strptime(ts, ISO8601)
return dt
except ValueError:
- dt = datetime.datetime.strptime(ts, RFC1123)
- return dt
+ try:
+ dt = datetime.datetime.strptime(ts, ISO8601_MS)
+ return dt
+ except ValueError:
+ dt = datetime.datetime.strptime(ts, RFC1123)
+ return dt
def find_class(module_name, class_name=None):
@@ -491,7 +507,7 @@ def update_dme(username, password, dme_id, ip_address):
"""
dme_url = 'https://www.dnsmadeeasy.com/servlet/updateip'
dme_url += '?username=%s&password=%s&id=%s&ip=%s'
- s = urllib2.urlopen(dme_url % (username, password, dme_id, ip_address))
+ s = urllib.request.urlopen(dme_url % (username, password, dme_id, ip_address))
return s.read()
@@ -515,12 +531,12 @@ def fetch_file(uri, file=None, username=None, password=None):
key.get_contents_to_file(file)
else:
if username and password:
- passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
+ passman = urllib.request.HTTPPasswordMgrWithDefaultRealm()
passman.add_password(None, uri, username, password)
- authhandler = urllib2.HTTPBasicAuthHandler(passman)
- opener = urllib2.build_opener(authhandler)
- urllib2.install_opener(opener)
- s = urllib2.urlopen(uri)
+ authhandler = urllib.request.HTTPBasicAuthHandler(passman)
+ opener = urllib.request.build_opener(authhandler)
+ urllib.request.install_opener(opener)
+ s = urllib.request.urlopen(uri)
file.write(s.read())
file.seek(0)
except:
@@ -535,7 +551,7 @@ class ShellCommand(object):
def __init__(self, command, wait=True, fail_fast=False, cwd=None):
self.exit_code = 0
self.command = command
- self.log_fp = StringIO.StringIO()
+ self.log_fp = StringIO()
self.wait = wait
self.fail_fast = fail_fast
self.run(cwd=cwd)
@@ -769,6 +785,8 @@ class Password(object):
self.hashfunc = hashfunc
def set(self, value):
+ if not isinstance(value, bytes):
+ value = value.encode('utf-8')
self.str = self.hashfunc(value).hexdigest()
def __str__(self):
@@ -777,6 +795,8 @@ class Password(object):
def __eq__(self, other):
if other is None:
return False
+ if not isinstance(other, bytes):
+ other = other.encode('utf-8')
return str(self.hashfunc(other).hexdigest()) == str(self.str)
def __len__(self):
@@ -843,13 +863,17 @@ def notify(subject, body=None, html_body=None, to_string=None,
def get_utf8_value(value):
- if not isinstance(value, basestring):
- value = str(value)
- if isinstance(value, unicode):
- return value.encode('utf-8')
- else:
+ if not six.PY2 and isinstance(value, bytes):
return value
+ if not isinstance(value, six.string_types):
+ value = six.text_type(value)
+
+ if isinstance(value, six.text_type):
+ value = value.encode('utf-8')
+
+ return value
+
def mklist(value):
if not isinstance(value, list):
@@ -912,7 +936,7 @@ def write_mime_multipart(content, compress=False, deftype='text/plain', delimite
rcontent = wrapper.as_string()
if compress:
- buf = StringIO.StringIO()
+ buf = StringIO()
gz = gzip.GzipFile(mode='wb', fileobj=buf)
try:
gz.write(rcontent)
@@ -987,6 +1011,8 @@ def compute_hash(fp, buf_size=8192, size=None, hash_algorithm=md5):
else:
s = fp.read(buf_size)
while s:
+ if not isinstance(s, bytes):
+ s = s.encode('utf-8')
hash_obj.update(s)
if size:
size -= len(s)
@@ -997,7 +1023,7 @@ def compute_hash(fp, buf_size=8192, size=None, hash_algorithm=md5):
else:
s = fp.read(buf_size)
hex_digest = hash_obj.hexdigest()
- base64_digest = base64.encodestring(hash_obj.digest())
+ base64_digest = encodebytes(hash_obj.digest()).decode('utf-8')
if base64_digest[-1] == '\n':
base64_digest = base64_digest[0:-1]
# data_size based on bytes read.
diff --git a/awx/lib/site-packages/boto/vendored/__init__.py b/awx/lib/site-packages/boto/vendored/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/awx/lib/site-packages/boto/vendored/six.py b/awx/lib/site-packages/boto/vendored/six.py
new file mode 100644
index 0000000000..55f5c3bfe3
--- /dev/null
+++ b/awx/lib/site-packages/boto/vendored/six.py
@@ -0,0 +1,756 @@
+"""Utilities for writing code that runs on Python 2 and 3"""
+
+# Copyright (c) 2010-2014 Benjamin Peterson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+import functools
+import operator
+import sys
+import types
+
+__author__ = "Benjamin Peterson "
+__version__ = "1.7.2"
+
+
+# Useful for very coarse version differentiation.
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+
+if PY3:
+ string_types = str,
+ integer_types = int,
+ class_types = type,
+ text_type = str
+ binary_type = bytes
+
+ MAXSIZE = sys.maxsize
+else:
+ string_types = basestring,
+ integer_types = (int, long)
+ class_types = (type, types.ClassType)
+ text_type = unicode
+ binary_type = str
+
+ if sys.platform.startswith("java"):
+ # Jython always uses 32 bits.
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
+ class X(object):
+ def __len__(self):
+ return 1 << 31
+ try:
+ len(X())
+ except OverflowError:
+ # 32-bit
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # 64-bit
+ MAXSIZE = int((1 << 63) - 1)
+ del X
+
+
+def _add_doc(func, doc):
+ """Add documentation to a function."""
+ func.__doc__ = doc
+
+
+def _import_module(name):
+ """Import module, returning the module after the last dot."""
+ __import__(name)
+ return sys.modules[name]
+
+
+class _LazyDescr(object):
+
+ def __init__(self, name):
+ self.name = name
+
+ def __get__(self, obj, tp):
+ result = self._resolve()
+ setattr(obj, self.name, result) # Invokes __set__.
+ # This is a bit ugly, but it avoids running this again.
+ delattr(obj.__class__, self.name)
+ return result
+
+
+class MovedModule(_LazyDescr):
+
+ def __init__(self, name, old, new=None):
+ super(MovedModule, self).__init__(name)
+ if PY3:
+ if new is None:
+ new = name
+ self.mod = new
+ else:
+ self.mod = old
+
+ def _resolve(self):
+ return _import_module(self.mod)
+
+ def __getattr__(self, attr):
+ _module = self._resolve()
+ value = getattr(_module, attr)
+ setattr(self, attr, value)
+ return value
+
+
+class _LazyModule(types.ModuleType):
+
+ def __init__(self, name):
+ super(_LazyModule, self).__init__(name)
+ self.__doc__ = self.__class__.__doc__
+
+ def __dir__(self):
+ attrs = ["__doc__", "__name__"]
+ attrs += [attr.name for attr in self._moved_attributes]
+ return attrs
+
+ # Subclasses should override this
+ _moved_attributes = []
+
+
+class MovedAttribute(_LazyDescr):
+
+ def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
+ super(MovedAttribute, self).__init__(name)
+ if PY3:
+ if new_mod is None:
+ new_mod = name
+ self.mod = new_mod
+ if new_attr is None:
+ if old_attr is None:
+ new_attr = name
+ else:
+ new_attr = old_attr
+ self.attr = new_attr
+ else:
+ self.mod = old_mod
+ if old_attr is None:
+ old_attr = name
+ self.attr = old_attr
+
+ def _resolve(self):
+ module = _import_module(self.mod)
+ return getattr(module, self.attr)
+
+
+class _SixMetaPathImporter(object):
+ """
+ A meta path importer to import six.moves and its submodules.
+
+ This class implements a PEP302 finder and loader. It should be compatible
+ with Python 2.5 and all existing versions of Python3
+ """
+ def __init__(self, six_module_name):
+ self.name = six_module_name
+ self.known_modules = {}
+
+ def _add_module(self, mod, *fullnames):
+ for fullname in fullnames:
+ self.known_modules[self.name + "." + fullname] = mod
+
+ def _get_module(self, fullname):
+ return self.known_modules[self.name + "." + fullname]
+
+ def find_module(self, fullname, path=None):
+ if fullname in self.known_modules:
+ return self
+ return None
+
+ def __get_module(self, fullname):
+ try:
+ return self.known_modules[fullname]
+ except KeyError:
+ raise ImportError("This loader does not know module " + fullname)
+
+ def load_module(self, fullname):
+ try:
+ # in case of a reload
+ return sys.modules[fullname]
+ except KeyError:
+ pass
+ mod = self.__get_module(fullname)
+ if isinstance(mod, MovedModule):
+ mod = mod._resolve()
+ else:
+ mod.__loader__ = self
+ sys.modules[fullname] = mod
+ return mod
+
+ def is_package(self, fullname):
+ """
+ Return true, if the named module is a package.
+
+ We need this method to get correct spec objects with
+ Python 3.4 (see PEP451)
+ """
+ return hasattr(self.__get_module(fullname), "__path__")
+
+ def get_code(self, fullname):
+ """Return None
+
+ Required, if is_package is implemented"""
+ self.__get_module(fullname) # eventually raises ImportError
+ return None
+ get_source = get_code # same as get_code
+
+_importer = _SixMetaPathImporter(__name__)
+
+
+class _MovedItems(_LazyModule):
+ """Lazy loading of moved objects"""
+ __path__ = [] # mark as package
+
+
+_moved_attributes = [
+ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
+ MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
+ MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
+ MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
+ MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+ MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
+ MovedAttribute("reduce", "__builtin__", "functools"),
+ MovedAttribute("StringIO", "StringIO", "io"),
+ MovedAttribute("UserDict", "UserDict", "collections"),
+ MovedAttribute("UserList", "UserList", "collections"),
+ MovedAttribute("UserString", "UserString", "collections"),
+ MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+ MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
+
+ MovedModule("builtins", "__builtin__"),
+ MovedModule("configparser", "ConfigParser"),
+ MovedModule("copyreg", "copy_reg"),
+ MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
+ MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
+ MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
+ MovedModule("http_cookies", "Cookie", "http.cookies"),
+ MovedModule("html_entities", "htmlentitydefs", "html.entities"),
+ MovedModule("html_parser", "HTMLParser", "html.parser"),
+ MovedModule("http_client", "httplib", "http.client"),
+ MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
+ MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
+ MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
+ MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
+ MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
+ MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
+ MovedModule("cPickle", "cPickle", "pickle"),
+ MovedModule("queue", "Queue"),
+ MovedModule("reprlib", "repr"),
+ MovedModule("socketserver", "SocketServer"),
+ MovedModule("_thread", "thread", "_thread"),
+ MovedModule("tkinter", "Tkinter"),
+ MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
+ MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
+ MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
+ MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+ MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
+ MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
+ MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
+ MovedModule("tkinter_colorchooser", "tkColorChooser",
+ "tkinter.colorchooser"),
+ MovedModule("tkinter_commondialog", "tkCommonDialog",
+ "tkinter.commondialog"),
+ MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_font", "tkFont", "tkinter.font"),
+ MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
+ MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
+ "tkinter.simpledialog"),
+ MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
+ MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
+ MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
+ MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+ MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
+ MovedModule("xmlrpc_server", "xmlrpclib", "xmlrpc.server"),
+ MovedModule("winreg", "_winreg"),
+]
+for attr in _moved_attributes:
+ setattr(_MovedItems, attr.name, attr)
+ if isinstance(attr, MovedModule):
+ _importer._add_module(attr, "moves." + attr.name)
+del attr
+
+_MovedItems._moved_attributes = _moved_attributes
+
+moves = _MovedItems(__name__ + ".moves")
+_importer._add_module(moves, "moves")
+
+
+class Module_six_moves_urllib_parse(_LazyModule):
+ """Lazy loading of moved objects in six.moves.urllib_parse"""
+
+
+_urllib_parse_moved_attributes = [
+ MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
+ MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
+ MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
+ MovedAttribute("urljoin", "urlparse", "urllib.parse"),
+ MovedAttribute("urlparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("quote", "urllib", "urllib.parse"),
+ MovedAttribute("quote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("unquote", "urllib", "urllib.parse"),
+ MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("urlencode", "urllib", "urllib.parse"),
+ MovedAttribute("splitquery", "urllib", "urllib.parse"),
+]
+for attr in _urllib_parse_moved_attributes:
+ setattr(Module_six_moves_urllib_parse, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
+ "moves.urllib_parse", "moves.urllib.parse")
+
+
+class Module_six_moves_urllib_error(_LazyModule):
+ """Lazy loading of moved objects in six.moves.urllib_error"""
+
+
+_urllib_error_moved_attributes = [
+ MovedAttribute("URLError", "urllib2", "urllib.error"),
+ MovedAttribute("HTTPError", "urllib2", "urllib.error"),
+ MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
+]
+for attr in _urllib_error_moved_attributes:
+ setattr(Module_six_moves_urllib_error, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
+ "moves.urllib_error", "moves.urllib.error")
+
+
+class Module_six_moves_urllib_request(_LazyModule):
+ """Lazy loading of moved objects in six.moves.urllib_request"""
+
+
+_urllib_request_moved_attributes = [
+ MovedAttribute("urlopen", "urllib2", "urllib.request"),
+ MovedAttribute("install_opener", "urllib2", "urllib.request"),
+ MovedAttribute("build_opener", "urllib2", "urllib.request"),
+ MovedAttribute("pathname2url", "urllib", "urllib.request"),
+ MovedAttribute("url2pathname", "urllib", "urllib.request"),
+ MovedAttribute("getproxies", "urllib", "urllib.request"),
+ MovedAttribute("Request", "urllib2", "urllib.request"),
+ MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
+ MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FileHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("urlretrieve", "urllib", "urllib.request"),
+ MovedAttribute("urlcleanup", "urllib", "urllib.request"),
+ MovedAttribute("URLopener", "urllib", "urllib.request"),
+ MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
+ MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
+]
+for attr in _urllib_request_moved_attributes:
+ setattr(Module_six_moves_urllib_request, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
+ "moves.urllib_request", "moves.urllib.request")
+
+
+class Module_six_moves_urllib_response(_LazyModule):
+ """Lazy loading of moved objects in six.moves.urllib_response"""
+
+
+_urllib_response_moved_attributes = [
+ MovedAttribute("addbase", "urllib", "urllib.response"),
+ MovedAttribute("addclosehook", "urllib", "urllib.response"),
+ MovedAttribute("addinfo", "urllib", "urllib.response"),
+ MovedAttribute("addinfourl", "urllib", "urllib.response"),
+]
+for attr in _urllib_response_moved_attributes:
+ setattr(Module_six_moves_urllib_response, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
+ "moves.urllib_response", "moves.urllib.response")
+
+
+class Module_six_moves_urllib_robotparser(_LazyModule):
+ """Lazy loading of moved objects in six.moves.urllib_robotparser"""
+
+
+_urllib_robotparser_moved_attributes = [
+ MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
+]
+for attr in _urllib_robotparser_moved_attributes:
+ setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
+ "moves.urllib_robotparser", "moves.urllib.robotparser")
+
+
+class Module_six_moves_urllib(types.ModuleType):
+ """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
+ __path__ = [] # mark as package
+ parse = _importer._get_module("moves.urllib_parse")
+ error = _importer._get_module("moves.urllib_error")
+ request = _importer._get_module("moves.urllib_request")
+ response = _importer._get_module("moves.urllib_response")
+ robotparser = _importer._get_module("moves.urllib_robotparser")
+
+ def __dir__(self):
+ return ['parse', 'error', 'request', 'response', 'robotparser']
+
+_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
+ "moves.urllib")
+
+
+def add_move(move):
+ """Add an item to six.moves."""
+ setattr(_MovedItems, move.name, move)
+
+
+def remove_move(name):
+ """Remove item from six.moves."""
+ try:
+ delattr(_MovedItems, name)
+ except AttributeError:
+ try:
+ del moves.__dict__[name]
+ except KeyError:
+ raise AttributeError("no such move, %r" % (name,))
+
+
+if PY3:
+ _meth_func = "__func__"
+ _meth_self = "__self__"
+
+ _func_closure = "__closure__"
+ _func_code = "__code__"
+ _func_defaults = "__defaults__"
+ _func_globals = "__globals__"
+else:
+ _meth_func = "im_func"
+ _meth_self = "im_self"
+
+ _func_closure = "func_closure"
+ _func_code = "func_code"
+ _func_defaults = "func_defaults"
+ _func_globals = "func_globals"
+
+
+try:
+ advance_iterator = next
+except NameError:
+ def advance_iterator(it):
+ return it.next()
+next = advance_iterator
+
+
+try:
+ callable = callable
+except NameError:
+ def callable(obj):
+ return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+
+
+if PY3:
+ def get_unbound_function(unbound):
+ return unbound
+
+ create_bound_method = types.MethodType
+
+ Iterator = object
+else:
+ def get_unbound_function(unbound):
+ return unbound.im_func
+
+ def create_bound_method(func, obj):
+ return types.MethodType(func, obj, obj.__class__)
+
+ class Iterator(object):
+
+ def next(self):
+ return type(self).__next__(self)
+
+ callable = callable
+_add_doc(get_unbound_function,
+ """Get the function out of a possibly unbound function""")
+
+
+get_method_function = operator.attrgetter(_meth_func)
+get_method_self = operator.attrgetter(_meth_self)
+get_function_closure = operator.attrgetter(_func_closure)
+get_function_code = operator.attrgetter(_func_code)
+get_function_defaults = operator.attrgetter(_func_defaults)
+get_function_globals = operator.attrgetter(_func_globals)
+
+
+if PY3:
+ def iterkeys(d, **kw):
+ return iter(d.keys(**kw))
+
+ def itervalues(d, **kw):
+ return iter(d.values(**kw))
+
+ def iteritems(d, **kw):
+ return iter(d.items(**kw))
+
+ def iterlists(d, **kw):
+ return iter(d.lists(**kw))
+else:
+ def iterkeys(d, **kw):
+ return iter(d.iterkeys(**kw))
+
+ def itervalues(d, **kw):
+ return iter(d.itervalues(**kw))
+
+ def iteritems(d, **kw):
+ return iter(d.iteritems(**kw))
+
+ def iterlists(d, **kw):
+ return iter(d.iterlists(**kw))
+
+_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
+_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
+_add_doc(iteritems,
+ "Return an iterator over the (key, value) pairs of a dictionary.")
+_add_doc(iterlists,
+ "Return an iterator over the (key, [values]) pairs of a dictionary.")
+
+
+if PY3:
+ def b(s):
+ return s.encode("latin-1")
+ def u(s):
+ return s
+ unichr = chr
+ if sys.version_info[1] <= 1:
+ def int2byte(i):
+ return bytes((i,))
+ else:
+ # This is about 2x faster than the implementation above on 3.2+
+ int2byte = operator.methodcaller("to_bytes", 1, "big")
+ byte2int = operator.itemgetter(0)
+ indexbytes = operator.getitem
+ iterbytes = iter
+ import io
+ StringIO = io.StringIO
+ BytesIO = io.BytesIO
+else:
+ def b(s):
+ return s
+ # Workaround for standalone backslash
+ def u(s):
+ return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
+ unichr = unichr
+ int2byte = chr
+ def byte2int(bs):
+ return ord(bs[0])
+ def indexbytes(buf, i):
+ return ord(buf[i])
+ def iterbytes(buf):
+ return (ord(byte) for byte in buf)
+ import StringIO
+ StringIO = BytesIO = StringIO.StringIO
+_add_doc(b, """Byte literal""")
+_add_doc(u, """Text literal""")
+
+
+if PY3:
+ exec_ = getattr(moves.builtins, "exec")
+
+
+ def reraise(tp, value, tb=None):
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+
+else:
+ def exec_(_code_, _globs_=None, _locs_=None):
+ """Execute code in a namespace."""
+ if _globs_ is None:
+ frame = sys._getframe(1)
+ _globs_ = frame.f_globals
+ if _locs_ is None:
+ _locs_ = frame.f_locals
+ del frame
+ elif _locs_ is None:
+ _locs_ = _globs_
+ exec("""exec _code_ in _globs_, _locs_""")
+
+
+ exec_("""def reraise(tp, value, tb=None):
+ raise tp, value, tb
+""")
+
+
+print_ = getattr(moves.builtins, "print", None)
+if print_ is None:
+ def print_(*args, **kwargs):
+ """The new-style print function for Python 2.4 and 2.5."""
+ fp = kwargs.pop("file", sys.stdout)
+ if fp is None:
+ return
+ def write(data):
+ if not isinstance(data, basestring):
+ data = str(data)
+ # If the file has an encoding, encode unicode with it.
+ if (isinstance(fp, file) and
+ isinstance(data, unicode) and
+ fp.encoding is not None):
+ errors = getattr(fp, "errors", None)
+ if errors is None:
+ errors = "strict"
+ data = data.encode(fp.encoding, errors)
+ fp.write(data)
+ want_unicode = False
+ sep = kwargs.pop("sep", None)
+ if sep is not None:
+ if isinstance(sep, unicode):
+ want_unicode = True
+ elif not isinstance(sep, str):
+ raise TypeError("sep must be None or a string")
+ end = kwargs.pop("end", None)
+ if end is not None:
+ if isinstance(end, unicode):
+ want_unicode = True
+ elif not isinstance(end, str):
+ raise TypeError("end must be None or a string")
+ if kwargs:
+ raise TypeError("invalid keyword arguments to print()")
+ if not want_unicode:
+ for arg in args:
+ if isinstance(arg, unicode):
+ want_unicode = True
+ break
+ if want_unicode:
+ newline = unicode("\n")
+ space = unicode(" ")
+ else:
+ newline = "\n"
+ space = " "
+ if sep is None:
+ sep = space
+ if end is None:
+ end = newline
+ for i, arg in enumerate(args):
+ if i:
+ write(sep)
+ write(arg)
+ write(end)
+
+_add_doc(reraise, """Reraise an exception.""")
+
+if sys.version_info[0:2] < (3, 4):
+ def wraps(wrapped):
+ def wrapper(f):
+ f = functools.wraps(wrapped)(f)
+ f.__wrapped__ = wrapped
+ return f
+ return wrapper
+else:
+ wraps = functools.wraps
+
+def with_metaclass(meta, *bases):
+ """Create a base class with a metaclass."""
+ # This requires a bit of explanation: the basic idea is to make a
+ # dummy metaclass for one level of class instantiation that replaces
+ # itself with the actual metaclass. Because of internal type checks
+ # we also need to make sure that we downgrade the custom metaclass
+ # for one level to something closer to type (that's why __call__ and
+ # __init__ comes back from type etc.).
+ class metaclass(meta):
+ __call__ = type.__call__
+ __init__ = type.__init__
+ def __new__(cls, name, this_bases, d):
+ if this_bases is None:
+ return type.__new__(cls, name, (), d)
+ return meta(name, bases, d)
+ return metaclass('temporary_class', None, {})
+
+
+def add_metaclass(metaclass):
+ """Class decorator for creating a class with a metaclass."""
+ def wrapper(cls):
+ orig_vars = cls.__dict__.copy()
+ orig_vars.pop('__dict__', None)
+ orig_vars.pop('__weakref__', None)
+ slots = orig_vars.get('__slots__')
+ if slots is not None:
+ if isinstance(slots, str):
+ slots = [slots]
+ for slots_var in slots:
+ orig_vars.pop(slots_var)
+ return metaclass(cls.__name__, cls.__bases__, orig_vars)
+ return wrapper
+
+# Complete the moves implementation.
+# This code is at the end of this module to speed up module loading.
+# Turn this module into a package.
+__path__ = [] # required for PEP 302 and PEP 451
+__package__ = __name__ # see PEP 366 @ReservedAssignment
+try:
+ __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
+except NameError:
+ pass
+# Remove other six meta path importers, since they cause problems. This can
+# happen if six is removed from sys.modules and then reloaded. (Setuptools does
+# this for some reason.)
+if sys.meta_path:
+ for i, importer in enumerate(sys.meta_path):
+ # Here's some real nastiness: Another "instance" of the six module might
+ # be floating around. Therefore, we can't use isinstance() to check for
+ # the six meta path importer, since the other six instance will have
+ # inserted an importer with different class.
+ if (type(importer).__name__ == "_SixMetaPathImporter" and
+ importer.name == __name__):
+ del sys.meta_path[i]
+ break
+ del i, importer
+# Finally, add the importer to the meta path import hook.
+sys.meta_path.append(_importer)
diff --git a/awx/lib/site-packages/boto/vpc/__init__.py b/awx/lib/site-packages/boto/vpc/__init__.py
index 4025d6679f..ee9558c356 100644
--- a/awx/lib/site-packages/boto/vpc/__init__.py
+++ b/awx/lib/site-packages/boto/vpc/__init__.py
@@ -34,6 +34,7 @@ from boto.vpc.vpngateway import VpnGateway, Attachment
from boto.vpc.dhcpoptions import DhcpOptions
from boto.vpc.subnet import Subnet
from boto.vpc.vpnconnection import VpnConnection
+from boto.vpc.vpc_peering_connection import VpcPeeringConnection
from boto.ec2 import RegionData
from boto.regioninfo import RegionInfo, get_regions
@@ -85,9 +86,9 @@ class VPCConnection(EC2Connection):
:type vpc_ids: list
:param vpc_ids: A list of strings with the desired VPC ID's
- :type filters: list of tuples
- :param filters: A list of tuples containing filters. Each tuple
- consists of a filter key and a filter value.
+ :type filters: list of tuples or dict
+ :param filters: A list of tuples or dict containing filters. Each tuple
+ or dict item consists of a filter key and a filter value.
Possible filter keys are:
* *state* - a list of states of the VPC (pending or available)
@@ -104,7 +105,7 @@ class VPCConnection(EC2Connection):
if vpc_ids:
self.build_list_params(params, vpc_ids, 'VpcId')
if filters:
- self.build_filter_params(params, dict(filters))
+ self.build_filter_params(params, filters)
if dry_run:
params['DryRun'] = 'true'
return self.get_list('DescribeVpcs', params, [('item', VPC)])
@@ -204,9 +205,9 @@ class VPCConnection(EC2Connection):
:param route_table_ids: A list of strings with the desired route table
IDs.
- :type filters: list of tuples
- :param filters: A list of tuples containing filters. Each tuple
- consists of a filter key and a filter value.
+ :type filters: list of tuples or dict
+ :param filters: A list of tuples or dict containing filters. Each tuple
+ or dict item consists of a filter key and a filter value.
:type dry_run: bool
:param dry_run: Set to True if the operation should not actually run.
@@ -218,7 +219,7 @@ class VPCConnection(EC2Connection):
if route_table_ids:
self.build_list_params(params, route_table_ids, "RouteTableId")
if filters:
- self.build_filter_params(params, dict(filters))
+ self.build_filter_params(params, filters)
if dry_run:
params['DryRun'] = 'true'
return self.get_list('DescribeRouteTables', params,
@@ -516,9 +517,9 @@ class VPCConnection(EC2Connection):
:param network_acl_ids: A list of strings with the desired network ACL
IDs.
- :type filters: list of tuples
- :param filters: A list of tuples containing filters. Each tuple
- consists of a filter key and a filter value.
+ :type filters: list of tuples or dict
+ :param filters: A list of tuples or dict containing filters. Each tuple
+ or dict item consists of a filter key and a filter value.
:rtype: list
:return: A list of :class:`boto.vpc.networkacl.NetworkAcl`
@@ -527,7 +528,7 @@ class VPCConnection(EC2Connection):
if network_acl_ids:
self.build_list_params(params, network_acl_ids, "NetworkAclId")
if filters:
- self.build_filter_params(params, dict(filters))
+ self.build_filter_params(params, filters)
return self.get_list('DescribeNetworkAcls', params,
[('item', NetworkAcl)])
@@ -779,9 +780,9 @@ class VPCConnection(EC2Connection):
:type internet_gateway_ids: list
:param internet_gateway_ids: A list of strings with the desired gateway IDs.
- :type filters: list of tuples
- :param filters: A list of tuples containing filters. Each tuple
- consists of a filter key and a filter value.
+ :type filters: list of tuples or dict
+ :param filters: A list of tuples or dict containing filters. Each tuple
+ or dict item consists of a filter key and a filter value.
:type dry_run: bool
:param dry_run: Set to True if the operation should not actually run.
@@ -793,7 +794,7 @@ class VPCConnection(EC2Connection):
self.build_list_params(params, internet_gateway_ids,
'InternetGatewayId')
if filters:
- self.build_filter_params(params, dict(filters))
+ self.build_filter_params(params, filters)
if dry_run:
params['DryRun'] = 'true'
return self.get_list('DescribeInternetGateways', params,
@@ -896,9 +897,9 @@ class VPCConnection(EC2Connection):
:param customer_gateway_ids: A list of strings with the desired
CustomerGateway ID's.
- :type filters: list of tuples
- :param filters: A list of tuples containing filters. Each tuple
- consists of a filter key and a filter value.
+ :type filters: list of tuples or dict
+ :param filters: A list of tuples or dict containing filters. Each tuple
+ or dict item consists of a filter key and a filter value.
Possible filter keys are:
- *state*, the state of the CustomerGateway
@@ -918,7 +919,7 @@ class VPCConnection(EC2Connection):
self.build_list_params(params, customer_gateway_ids,
'CustomerGatewayId')
if filters:
- self.build_filter_params(params, dict(filters))
+ self.build_filter_params(params, filters)
if dry_run:
params['DryRun'] = 'true'
@@ -985,9 +986,9 @@ class VPCConnection(EC2Connection):
:type vpn_gateway_ids: list
:param vpn_gateway_ids: A list of strings with the desired VpnGateway ID's
- :type filters: list of tuples
- :param filters: A list of tuples containing filters. Each tuple
- consists of a filter key and a filter value.
+ :type filters: list of tuples or dict
+ :param filters: A list of tuples or dict containing filters. Each tuple
+ or dict item consists of a filter key and a filter value.
Possible filter keys are:
- *state*, a list of states of the VpnGateway
@@ -1006,7 +1007,7 @@ class VPCConnection(EC2Connection):
if vpn_gateway_ids:
self.build_list_params(params, vpn_gateway_ids, 'VpnGatewayId')
if filters:
- self.build_filter_params(params, dict(filters))
+ self.build_filter_params(params, filters)
if dry_run:
params['DryRun'] = 'true'
return self.get_list('DescribeVpnGateways', params,
@@ -1109,9 +1110,9 @@ class VPCConnection(EC2Connection):
:type subnet_ids: list
:param subnet_ids: A list of strings with the desired Subnet ID's
- :type filters: list of tuples
- :param filters: A list of tuples containing filters. Each tuple
- consists of a filter key and a filter value.
+ :type filters: list of tuples or dict
+ :param filters: A list of tuples or dict containing filters. Each tuple
+ or dict item consists of a filter key and a filter value.
Possible filter keys are:
- *state*, a list of states of the Subnet
@@ -1132,7 +1133,7 @@ class VPCConnection(EC2Connection):
if subnet_ids:
self.build_list_params(params, subnet_ids, 'SubnetId')
if filters:
- self.build_filter_params(params, dict(filters))
+ self.build_filter_params(params, filters)
if dry_run:
params['DryRun'] = 'true'
return self.get_list('DescribeSubnets', params, [('item', Subnet)])
@@ -1192,9 +1193,9 @@ class VPCConnection(EC2Connection):
:type dhcp_options_ids: list
:param dhcp_options_ids: A list of strings with the desired DhcpOption ID's
- :type filters: list of tuples
- :param filters: A list of tuples containing filters. Each tuple
- consists of a filter key and a filter value.
+ :type filters: list of tuples or dict
+ :param filters: A list of tuples or dict containing filters. Each tuple
+ or dict item consists of a filter key and a filter value.
:type dry_run: bool
:param dry_run: Set to True if the operation should not actually run.
@@ -1206,7 +1207,7 @@ class VPCConnection(EC2Connection):
if dhcp_options_ids:
self.build_list_params(params, dhcp_options_ids, 'DhcpOptionsId')
if filters:
- self.build_filter_params(params, dict(filters))
+ self.build_filter_params(params, filters)
if dry_run:
params['DryRun'] = 'true'
return self.get_list('DescribeDhcpOptions', params,
@@ -1339,9 +1340,9 @@ class VPCConnection(EC2Connection):
:type vpn_connection_ids: list
:param vpn_connection_ids: A list of strings with the desired VPN_CONNECTION ID's
- :type filters: list of tuples
- :param filters: A list of tuples containing filters. Each tuple
- consists of a filter key and a filter value.
+ :type filters: list of tuples or dict
+ :param filters: A list of tuples or dict containing filters. Each tuple
+ or dict item consists of a filter key and a filter value.
Possible filter keys are:
- *state*, a list of states of the VPN_CONNECTION
@@ -1363,7 +1364,7 @@ class VPCConnection(EC2Connection):
self.build_list_params(params, vpn_connection_ids,
'VpnConnectionId')
if filters:
- self.build_filter_params(params, dict(filters))
+ self.build_filter_params(params, filters)
if dry_run:
params['DryRun'] = 'true'
return self.get_list('DescribeVpnConnections', params,
@@ -1533,3 +1534,140 @@ class VPCConnection(EC2Connection):
if dry_run:
params['DryRun'] = 'true'
return self.get_status('DeleteVpnConnectionRoute', params)
+
+ def get_all_vpc_peering_connections(self, vpc_peering_connection_ids=None,
+ filters=None, dry_run=False):
+ """
+ Retrieve information about your VPC peering connections. You
+ can filter results to return information only about those VPC
+ peering connections that match your search parameters.
+ Otherwise, all VPC peering connections associated with your
+ account are returned.
+
+ :type vpc_peering_connection_ids: list
+ :param vpc_peering_connection_ids: A list of strings with the desired VPC
+ peering connection ID's
+
+ :type filters: list of tuples
+ :param filters: A list of tuples containing filters. Each tuple
+ consists of a filter key and a filter value.
+ Possible filter keys are:
+
+ * *accepter-vpc-info.cidr-block* - The CIDR block of the peer VPC.
+ * *accepter-vpc-info.owner-id* - The AWS account ID of the owner
+ of the peer VPC.
+ * *accepter-vpc-info.vpc-id* - The ID of the peer VPC.
+ * *expiration-time* - The expiration date and time for the VPC
+ peering connection.
+ * *requester-vpc-info.cidr-block* - The CIDR block of the
+ requester's VPC.
+ * *requester-vpc-info.owner-id* - The AWS account ID of the
+ owner of the requester VPC.
+ * *requester-vpc-info.vpc-id* - The ID of the requester VPC.
+ * *status-code* - The status of the VPC peering connection.
+ * *status-message* - A message that provides more information
+ about the status of the VPC peering connection, if applicable.
+
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
+ :rtype: list
+        :return: A list of :class:`boto.vpc.vpc_peering_connection.VpcPeeringConnection`
+ """
+ params = {}
+ if vpc_peering_connection_ids:
+ self.build_list_params(params, vpc_peering_connection_ids, 'VpcPeeringConnectionId')
+ if filters:
+ self.build_filter_params(params, dict(filters))
+ if dry_run:
+ params['DryRun'] = 'true'
+ return self.get_list('DescribeVpcPeeringConnections', params, [('item', VpcPeeringConnection)])
+
+ def create_vpc_peering_connection(self, vpc_id, peer_vpc_id,
+ peer_owner_id=None, dry_run=False):
+ """
+        Create a new VPC peering connection.
+
+ :type vpc_id: str
+ :param vpc_id: The ID of the requester VPC.
+
+ :type peer_vpc_id: str
+        :param peer_vpc_id: The ID of the VPC with which you are creating the peering connection.
+
+ :type peer_owner_id: str
+ :param peer_owner_id: The AWS account ID of the owner of the peer VPC.
+
+ :rtype: The newly created VpcPeeringConnection
+ :return: A :class:`boto.vpc.vpc_peering_connection.VpcPeeringConnection` object
+ """
+ params = {'VpcId': vpc_id,
+ 'PeerVpcId': peer_vpc_id }
+ if peer_owner_id is not None:
+ params['PeerOwnerId'] = peer_owner_id
+ if dry_run:
+ params['DryRun'] = 'true'
+
+ return self.get_object('CreateVpcPeeringConnection', params,
+ VpcPeeringConnection)
+
+ def delete_vpc_peering_connection(self, vpc_peering_connection_id, dry_run=False):
+ """
+ Deletes a VPC peering connection. Either the owner of the requester
+ VPC or the owner of the peer VPC can delete the VPC peering connection
+ if it's in the active state. The owner of the requester VPC can delete
+ a VPC peering connection in the pending-acceptance state.
+
+ :type vpc_peering_connection_id: str
+ :param vpc_peering_connection_id: The ID of the VPC peering connection.
+
+ :rtype: bool
+ :return: True if successful
+ """
+ params = {
+ 'VpcPeeringConnectionId': vpc_peering_connection_id
+ }
+
+ if dry_run:
+ params['DryRun'] = 'true'
+ return self.get_status('DeleteVpcPeeringConnection', params)
+
+ def reject_vpc_peering_connection(self, vpc_peering_connection_id, dry_run=False):
+ """
+ Rejects a VPC peering connection request. The VPC peering connection
+ must be in the pending-acceptance state.
+
+ :type vpc_peering_connection_id: str
+ :param vpc_peering_connection_id: The ID of the VPC peering connection.
+
+ :rtype: bool
+ :return: True if successful
+ """
+ params = {
+ 'VpcPeeringConnectionId': vpc_peering_connection_id
+ }
+
+ if dry_run:
+ params['DryRun'] = 'true'
+ return self.get_status('RejectVpcPeeringConnection', params)
+
+ def accept_vpc_peering_connection(self, vpc_peering_connection_id, dry_run=False):
+ """
+        Accepts a VPC peering connection request. The VPC peering connection
+ must be in the pending-acceptance state.
+
+ :type vpc_peering_connection_id: str
+ :param vpc_peering_connection_id: The ID of the VPC peering connection.
+
+ :rtype: Accepted VpcPeeringConnection
+ :return: A :class:`boto.vpc.vpc_peering_connection.VpcPeeringConnection` object
+ """
+ params = {
+ 'VpcPeeringConnectionId': vpc_peering_connection_id
+ }
+
+ if dry_run:
+ params['DryRun'] = 'true'
+
+ return self.get_object('AcceptVpcPeeringConnection', params,
+ VpcPeeringConnection)
+
diff --git a/awx/lib/site-packages/boto/vpc/vpc_peering_connection.py b/awx/lib/site-packages/boto/vpc/vpc_peering_connection.py
new file mode 100644
index 0000000000..cdb9af8dae
--- /dev/null
+++ b/awx/lib/site-packages/boto/vpc/vpc_peering_connection.py
@@ -0,0 +1,163 @@
+# Copyright (c) 2014 Skytap http://skytap.com/
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+
+"""
+Represents a VPC Peering Connection.
+"""
+
+from boto.ec2.ec2object import TaggedEC2Object
+
+class VpcInfo(object):
+ def __init__(self):
+ """
+ Information on peer Vpc.
+
+        :ivar vpc_id: The unique ID of the peer VPC.
+        :ivar owner_id: The AWS account ID of the owner of the peer VPC.
+        :ivar cidr_block: The CIDR block of the peer VPC.
+ """
+
+ self.vpc_id = None
+ self.owner_id = None
+ self.cidr_block = None
+
+ def __repr__(self):
+ return 'VpcInfo:%s' % self.vpc_id
+
+ def startElement(self, name, attrs, connection):
+ pass
+
+ def endElement(self, name, value, connection):
+ if name == 'vpcId':
+ self.vpc_id = value
+ elif name == 'ownerId':
+ self.owner_id = value
+ elif name == 'cidrBlock':
+ self.cidr_block = value
+ else:
+ setattr(self, name, value)
+
+class VpcPeeringConnectionStatus(object):
+ """
+    The status of a VPC peering connection.
+
+ :ivar code: The status of the VPC peering connection. Valid values are:
+
+ * pending-acceptance
+ * failed
+ * expired
+ * provisioning
+ * active
+ * deleted
+ * rejected
+
+ :ivar message: A message that provides more information about the status of the VPC peering connection, if applicable.
+ """
+ def __init__(self, code=0, message=None):
+ self.code = code
+ self.message = message
+
+ def __repr__(self):
+ return '%s(%d)' % (self.code, self.message)
+
+ def startElement(self, name, attrs, connection):
+ pass
+
+ def endElement(self, name, value, connection):
+ if name == 'code':
+ self.code = value
+ elif name == 'message':
+ self.message = value
+ else:
+ setattr(self, name, value)
+
+
+
+class VpcPeeringConnection(TaggedEC2Object):
+
+ def __init__(self, connection=None):
+ """
+ Represents a VPC peering connection.
+
+ :ivar id: The unique ID of the VPC peering connection.
+ :ivar accepter_vpc_info: Information on peer Vpc.
+ :ivar requester_vpc_info: Information on requester Vpc.
+ :ivar expiration_time: The expiration date and time for the VPC peering connection.
+ :ivar status_code: The status of the VPC peering connection.
+ :ivar status_message: A message that provides more information about the status of the VPC peering connection, if applicable.
+ """
+ super(VpcPeeringConnection, self).__init__(connection)
+ self.id = None
+ self.accepter_vpc_info = VpcInfo()
+ self.requester_vpc_info = VpcInfo()
+ self.expiration_time = None
+ self._status = VpcPeeringConnectionStatus()
+
+ @property
+ def status_code(self):
+ return self._status.code
+
+ @property
+ def status_message(self):
+ return self._status.message
+
+ def __repr__(self):
+ return 'VpcPeeringConnection:%s' % self.id
+
+ def startElement(self, name, attrs, connection):
+ retval = super(VpcPeeringConnection, self).startElement(name, attrs, connection)
+ if retval is not None:
+ return retval
+
+ if name == 'requesterVpcInfo':
+ return self.requester_vpc_info
+ elif name == 'accepterVpcInfo':
+ return self.accepter_vpc_info
+ elif name == 'status':
+ return self._status
+
+ return None
+
+ def endElement(self, name, value, connection):
+ if name == 'vpcPeeringConnectionId':
+ self.id = value
+ elif name == 'expirationTime':
+ self.expiration_time = value
+ else:
+ setattr(self, name, value)
+
+ def delete(self):
+ return self.connection.delete_vpc_peering_connection(self.id)
+
+ def _update(self, updated):
+ self.__dict__.update(updated.__dict__)
+
+ def update(self, validate=False, dry_run=False):
+ vpc_peering_connection_list = self.connection.get_all_vpc_peering_connections(
+ [self.id],
+ dry_run=dry_run
+ )
+ if len(vpc_peering_connection_list):
+ updated_vpc_peering_connection = vpc_peering_connection_list[0]
+ self._update(updated_vpc_peering_connection)
+ elif validate:
+ raise ValueError('%s is not a valid VpcPeeringConnection ID' % (self.id,))
+ return self.status_code