diff --git a/Makefile b/Makefile index 807f0ba732..0bc3af431d 100644 --- a/Makefile +++ b/Makefile @@ -110,7 +110,7 @@ push: # locally downloaded packages). requirements: @if [ "$(VIRTUAL_ENV)" ]; then \ - (cd requirements && pip install --no-index setuptools-2.2.tar.gz); \ + (cd requirements && pip install --no-index setuptools-12.0.5.tar.gz); \ (cd requirements && pip install --no-index Django-1.6.7.tar.gz); \ (cd requirements && pip install --no-index -r dev_local.txt); \ $(PYTHON) fix_virtualenv_setuptools.py; \ @@ -122,7 +122,7 @@ requirements: # (downloading from PyPI if necessary). requirements_pypi: @if [ "$(VIRTUAL_ENV)" ]; then \ - pip install setuptools==2.2; \ + pip install setuptools==12.0.5; \ pip install Django\>=1.6.7,\<1.7; \ pip install -r requirements/dev.txt; \ $(PYTHON) fix_virtualenv_setuptools.py; \ diff --git a/awx/lib/site-packages/README b/awx/lib/site-packages/README index d0e4365ac3..e5db657b74 100644 --- a/awx/lib/site-packages/README +++ b/awx/lib/site-packages/README @@ -5,6 +5,7 @@ amqp==1.4.5 (amqp/*) ansi2html==1.0.6 (ansi2html/*) anyjson==0.3.3 (anyjson/*) argparse==1.2.1 (argparse.py, needed for Python 2.6 support) +azure==0.9.0 (azure/*) Babel==1.3 (babel/*, excluded bin/pybabel) billiard==3.3.0.16 (billiard/*, funtests/*, excluded _billiard.so) boto==2.34.0 (boto/*, excluded bin/asadmin, bin/bundle_image, bin/cfadmin, @@ -30,9 +31,9 @@ gevent-websocket==0.9.3 (geventwebsocket/*) httplib2==0.9 (httplib2/*) importlib==1.0.3 (importlib/*, needed for Python 2.6 support) iso8601==0.1.10 (iso8601/*) -keyring==4.0 (keyring/*, excluded bin/keyring) +keyring==4.1 (keyring/*, excluded bin/keyring) kombu==3.0.21 (kombu/*) -Markdown==2.4.1 (markdown/*, excluded bin/markdown_py) +Markdown==2.5.2 (markdown/*, excluded bin/markdown_py) mock==1.0.1 (mock.py) ordereddict==1.1 (ordereddict.py, needed for Python 2.6 support) os-diskconfig-python-novaclient-ext==0.1.2 (os_diskconfig_python_novaclient_ext/*) @@ -44,16 +45,16 @@ pexpect==3.1 (pexpect/*, excluded pxssh.py, fdpexpect.py, FSM.py, screen.py, pip==1.5.4 (pip/*, excluded bin/pip*) prettytable==0.7.2 (prettytable.py) pyrax==1.9.0 (pyrax/*) -python-dateutil==2.2 (dateutil/*) +python-dateutil==2.4.0 (dateutil/*) python-novaclient==2.18.1 (novaclient/*, excluded bin/nova) python-swiftclient==2.2.0 (swiftclient/*, excluded bin/swift) -pytz==2014.4 (pytz/*) +pytz==2014.10 (pytz/*) rackspace-auth-openstack==1.3 (rackspace_auth_openstack/*) rackspace-novaclient==1.4 (no files) rax-default-network-flags-python-novaclient-ext==0.2.3 (rax_default_network_flags_python_novaclient_ext/*) rax-scheduled-images-python-novaclient-ext==0.2.1 (rax_scheduled_images_python_novaclient_ext/*) -requests==2.3.0 (requests/*) -setuptools==2.2 (setuptools/*, _markerlib/*, pkg_resources.py, easy_install.py, excluded bin/easy_install*) +requests==2.5.1 (requests/*) +setuptools==12.0.5 (setuptools/*, _markerlib/*, pkg_resources/*, easy_install.py) simplejson==3.6.0 (simplejson/*, excluded simplejson/_speedups.so) -six==1.7.3 (six.py) +six==1.9.0 (six.py) South==0.8.4 (south/*) diff --git a/awx/lib/site-packages/azure/__init__.py b/awx/lib/site-packages/azure/__init__.py index d3228953f5..0e9250de04 100644 --- a/awx/lib/site-packages/azure/__init__.py +++ b/awx/lib/site-packages/azure/__init__.py @@ -1,905 +1,999 @@ -#------------------------------------------------------------------------- -# Copyright (c) Microsoft. All rights reserved. 
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#--------------------------------------------------------------------------
-import ast
-import base64
-import sys
-import types
-import warnings
-if sys.version_info < (3,):
-    from urllib2 import quote as url_quote
-    from urllib2 import unquote as url_unquote
-    _strtype = basestring
-else:
-    from urllib.parse import quote as url_quote
-    from urllib.parse import unquote as url_unquote
-    _strtype = str
-
-from datetime import datetime
-from xml.dom import minidom
-from xml.sax.saxutils import escape as xml_escape
-
-#--------------------------------------------------------------------------
-# constants
-
-__author__ = 'Microsoft Corp. <ptvshelp@microsoft.com>'
-__version__ = '0.8.1'
-
-# Live ServiceClient URLs
-BLOB_SERVICE_HOST_BASE = '.blob.core.windows.net'
-QUEUE_SERVICE_HOST_BASE = '.queue.core.windows.net'
-TABLE_SERVICE_HOST_BASE = '.table.core.windows.net'
-SERVICE_BUS_HOST_BASE = '.servicebus.windows.net'
-MANAGEMENT_HOST = 'management.core.windows.net'
-
-# Development ServiceClient URLs
-DEV_BLOB_HOST = '127.0.0.1:10000'
-DEV_QUEUE_HOST = '127.0.0.1:10001'
-DEV_TABLE_HOST = '127.0.0.1:10002'
-
-# Default credentials for Development Storage Service
-DEV_ACCOUNT_NAME = 'devstoreaccount1'
-DEV_ACCOUNT_KEY = 'Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw=='
-
-# All of our error messages
-_ERROR_CANNOT_FIND_PARTITION_KEY = 'Cannot find partition key in request.'
-_ERROR_CANNOT_FIND_ROW_KEY = 'Cannot find row key in request.'
-_ERROR_INCORRECT_TABLE_IN_BATCH = \
-    'Table should be the same in a batch operations'
-_ERROR_INCORRECT_PARTITION_KEY_IN_BATCH = \
-    'Partition Key should be the same in a batch operations'
-_ERROR_DUPLICATE_ROW_KEY_IN_BATCH = \
-    'Row Keys should not be the same in a batch operations'
-_ERROR_BATCH_COMMIT_FAIL = 'Batch Commit Fail'
-_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_DELETE = \
-    'Message is not peek locked and cannot be deleted.'
-_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_UNLOCK = \
-    'Message is not peek locked and cannot be unlocked.'
-_ERROR_QUEUE_NOT_FOUND = 'Queue was not found'
-_ERROR_TOPIC_NOT_FOUND = 'Topic was not found'
-_ERROR_CONFLICT = 'Conflict ({0})'
-_ERROR_NOT_FOUND = 'Not found ({0})'
-_ERROR_UNKNOWN = 'Unknown error ({0})'
-_ERROR_SERVICEBUS_MISSING_INFO = \
-    'You need to provide servicebus namespace, access key and Issuer'
-_ERROR_STORAGE_MISSING_INFO = \
-    'You need to provide both account name and access key'
-_ERROR_ACCESS_POLICY = \
-    'share_access_policy must be either SignedIdentifier or AccessPolicy ' + \
-    'instance'
-_WARNING_VALUE_SHOULD_BE_BYTES = \
-    'Warning: {0} must be bytes data type. It will be converted ' + \
-    'automatically, with utf-8 text encoding.'
-_ERROR_VALUE_SHOULD_BE_BYTES = '{0} should be of type bytes.'
-_ERROR_VALUE_NONE = '{0} should not be None.'
-_ERROR_VALUE_NEGATIVE = '{0} should not be negative.'
-_ERROR_CANNOT_SERIALIZE_VALUE_TO_ENTITY = \
-    'Cannot serialize the specified value ({0}) to an entity. 
Please use ' + \ - 'an EntityProperty (which can specify custom types), int, str, bool, ' + \ - 'or datetime.' -_ERROR_PAGE_BLOB_SIZE_ALIGNMENT = \ - 'Invalid page blob size: {0}. ' + \ - 'The size must be aligned to a 512-byte boundary.' - -_USER_AGENT_STRING = 'pyazure/' + __version__ - -METADATA_NS = 'http://schemas.microsoft.com/ado/2007/08/dataservices/metadata' - - -class WindowsAzureData(object): - - ''' This is the base of data class. - It is only used to check whether it is instance or not. ''' - pass - - -class WindowsAzureError(Exception): - - ''' WindowsAzure Excpetion base class. ''' - - def __init__(self, message): - super(WindowsAzureError, self).__init__(message) - - -class WindowsAzureConflictError(WindowsAzureError): - - '''Indicates that the resource could not be created because it already - exists''' - - def __init__(self, message): - super(WindowsAzureConflictError, self).__init__(message) - - -class WindowsAzureMissingResourceError(WindowsAzureError): - - '''Indicates that a request for a request for a resource (queue, table, - container, etc...) failed because the specified resource does not exist''' - - def __init__(self, message): - super(WindowsAzureMissingResourceError, self).__init__(message) - - -class WindowsAzureBatchOperationError(WindowsAzureError): - - '''Indicates that a batch operation failed''' - - def __init__(self, message, code): - super(WindowsAzureBatchOperationError, self).__init__(message) - self.code = code - - -class Feed(object): - pass - - -class _Base64String(str): - pass - - -class HeaderDict(dict): - - def __getitem__(self, index): - return super(HeaderDict, self).__getitem__(index.lower()) - - -def _encode_base64(data): - if isinstance(data, _unicode_type): - data = data.encode('utf-8') - encoded = base64.b64encode(data) - return encoded.decode('utf-8') - - -def _decode_base64_to_bytes(data): - if isinstance(data, _unicode_type): - data = data.encode('utf-8') - return base64.b64decode(data) - - -def _decode_base64_to_text(data): - decoded_bytes = _decode_base64_to_bytes(data) - return decoded_bytes.decode('utf-8') - - -def _get_readable_id(id_name, id_prefix_to_skip): - """simplified an id to be more friendly for us people""" - # id_name is in the form 'https://namespace.host.suffix/name' - # where name may contain a forward slash! 
- pos = id_name.find('//') - if pos != -1: - pos += 2 - if id_prefix_to_skip: - pos = id_name.find(id_prefix_to_skip, pos) - if pos != -1: - pos += len(id_prefix_to_skip) - pos = id_name.find('/', pos) - if pos != -1: - return id_name[pos + 1:] - return id_name - - -def _get_entry_properties(xmlstr, include_id, id_prefix_to_skip=None): - ''' get properties from entry xml ''' - xmldoc = minidom.parseString(xmlstr) - properties = {} - - for entry in _get_child_nodes(xmldoc, 'entry'): - etag = entry.getAttributeNS(METADATA_NS, 'etag') - if etag: - properties['etag'] = etag - for updated in _get_child_nodes(entry, 'updated'): - properties['updated'] = updated.firstChild.nodeValue - for name in _get_children_from_path(entry, 'author', 'name'): - if name.firstChild is not None: - properties['author'] = name.firstChild.nodeValue - - if include_id: - for id in _get_child_nodes(entry, 'id'): - properties['name'] = _get_readable_id( - id.firstChild.nodeValue, id_prefix_to_skip) - - return properties - - -def _get_first_child_node_value(parent_node, node_name): - xml_attrs = _get_child_nodes(parent_node, node_name) - if xml_attrs: - xml_attr = xml_attrs[0] - if xml_attr.firstChild: - value = xml_attr.firstChild.nodeValue - return value - - -def _get_child_nodes(node, tagName): - return [childNode for childNode in node.getElementsByTagName(tagName) - if childNode.parentNode == node] - - -def _get_children_from_path(node, *path): - '''descends through a hierarchy of nodes returning the list of children - at the inner most level. Only returns children who share a common parent, - not cousins.''' - cur = node - for index, child in enumerate(path): - if isinstance(child, _strtype): - next = _get_child_nodes(cur, child) - else: - next = _get_child_nodesNS(cur, *child) - if index == len(path) - 1: - return next - elif not next: - break - - cur = next[0] - return [] - - -def _get_child_nodesNS(node, ns, tagName): - return [childNode for childNode in node.getElementsByTagNameNS(ns, tagName) - if childNode.parentNode == node] - - -def _create_entry(entry_body): - ''' Adds common part of entry to a given entry body and return the whole - xml. 
'''
-    updated_str = datetime.utcnow().isoformat()
-    if datetime.utcnow().utcoffset() is None:
-        updated_str += '+00:00'
-
-    entry_start = '''<?xml version="1.0" encoding="utf-8" standalone="yes"?>
-<entry xmlns:d="http://schemas.microsoft.com/ado/2007/08/dataservices" xmlns:m="http://schemas.microsoft.com/ado/2007/08/dataservices/metadata" xmlns="http://www.w3.org/2005/Atom" >
-<title /><updated>{updated}</updated><author><name /></author><id />
-<content type="application/xml">
-    {body}</content></entry>'''
-    return entry_start.format(updated=updated_str, body=entry_body)
-
-
-def _to_datetime(strtime):
-    return datetime.strptime(strtime, "%Y-%m-%dT%H:%M:%S.%f")
-
-_KNOWN_SERIALIZATION_XFORMS = {
-    'include_apis': 'IncludeAPIs',
-    'message_id': 'MessageId',
-    'content_md5': 'Content-MD5',
-    'last_modified': 'Last-Modified',
-    'cache_control': 'Cache-Control',
-    'account_admin_live_email_id': 'AccountAdminLiveEmailId',
-    'service_admin_live_email_id': 'ServiceAdminLiveEmailId',
-    'subscription_id': 'SubscriptionID',
-    'fqdn': 'FQDN',
-    'private_id': 'PrivateID',
-    'os_virtual_hard_disk': 'OSVirtualHardDisk',
-    'logical_disk_size_in_gb': 'LogicalDiskSizeInGB',
-    'logical_size_in_gb': 'LogicalSizeInGB',
-    'os': 'OS',
-    'persistent_vm_downtime_info': 'PersistentVMDowntimeInfo',
-    'copy_id': 'CopyId',
-    }
-
-
-def _get_serialization_name(element_name):
-    """converts a Python name into a serializable name"""
-    known = _KNOWN_SERIALIZATION_XFORMS.get(element_name)
-    if known is not None:
-        return known
-
-    if element_name.startswith('x_ms_'):
-        return element_name.replace('_', '-')
-    if element_name.endswith('_id'):
-        element_name = element_name.replace('_id', 'ID')
-    for name in ['content_', 'last_modified', 'if_', 'cache_control']:
-        if element_name.startswith(name):
-            element_name = element_name.replace('_', '-_')
-
-    return ''.join(name.capitalize() for name in element_name.split('_'))
-
-if sys.version_info < (3,):
-    _unicode_type = unicode
-
-    def _str(value):
-        if isinstance(value, unicode):
-            return value.encode('utf-8')
-
-        return str(value)
-else:
-    _str = str
-    _unicode_type = str
-
-
-def _str_or_none(value):
-    if value is None:
-        return None
-
-    return _str(value)
-
-
-def _int_or_none(value):
-    if value is None:
-        return None
-
-    return str(int(value))
-
-
-def _bool_or_none(value):
-    if value is None:
-        return None
-
-    if isinstance(value, bool):
-        if value:
-            return 'true'
-        else:
-            return 'false'
-
-    return str(value)
-
-
-def _convert_class_to_xml(source, xml_prefix=True):
-    if source is None:
-        return ''
-
-    xmlstr = ''
-    if xml_prefix:
-        xmlstr = '<?xml version="1.0" encoding="utf-8"?>'
-
-    if isinstance(source, list):
-        for value in source:
-            xmlstr += _convert_class_to_xml(value, False)
-    elif isinstance(source, WindowsAzureData):
-        class_name = source.__class__.__name__
-        xmlstr += '<' + class_name + '>'
-        for name, value in vars(source).items():
-            if value is not None:
-                if isinstance(value, list) or \
-                    isinstance(value, WindowsAzureData):
-                    xmlstr += _convert_class_to_xml(value, False)
-                else:
-                    xmlstr += ('<' + _get_serialization_name(name) + '>' +
-                               xml_escape(str(value)) + '</' +
-                               _get_serialization_name(name) + '>')
-        xmlstr += '</' + class_name + '>'
-    return xmlstr
-
-
-def _find_namespaces_from_child(parent, child, namespaces):
-    """Recursively searches from the parent to the child,
-    gathering all the applicable namespaces along the way"""
-    for cur_child in parent.childNodes:
-        if cur_child is child:
-            return True
-        if _find_namespaces_from_child(cur_child, child, namespaces):
-            # we are the parent node
-            for key in cur_child.attributes.keys():
-                if key.startswith('xmlns:') or key == 'xmlns':
-                    namespaces[key] = cur_child.attributes[key]
-            break
-    return False
-
-
-def _find_namespaces(parent, 
child): - res = {} - for key in parent.documentElement.attributes.keys(): - if key.startswith('xmlns:') or key == 'xmlns': - res[key] = parent.documentElement.attributes[key] - _find_namespaces_from_child(parent, child, res) - return res - - -def _clone_node_with_namespaces(node_to_clone, original_doc): - clone = node_to_clone.cloneNode(True) - - for key, value in _find_namespaces(original_doc, node_to_clone).items(): - clone.attributes[key] = value - - return clone - - -def _convert_response_to_feeds(response, convert_func): - if response is None: - return None - - feeds = _list_of(Feed) - - x_ms_continuation = HeaderDict() - for name, value in response.headers: - if 'x-ms-continuation' in name: - x_ms_continuation[name[len('x-ms-continuation') + 1:]] = value - if x_ms_continuation: - setattr(feeds, 'x_ms_continuation', x_ms_continuation) - - xmldoc = minidom.parseString(response.body) - xml_entries = _get_children_from_path(xmldoc, 'feed', 'entry') - if not xml_entries: - # in some cases, response contains only entry but no feed - xml_entries = _get_children_from_path(xmldoc, 'entry') - for xml_entry in xml_entries: - new_node = _clone_node_with_namespaces(xml_entry, xmldoc) - feeds.append(convert_func(new_node.toxml('utf-8'))) - - return feeds - - -def _validate_type_bytes(param_name, param): - if not isinstance(param, bytes): - raise TypeError(_ERROR_VALUE_SHOULD_BE_BYTES.format(param_name)) - - -def _validate_not_none(param_name, param): - if param is None: - raise TypeError(_ERROR_VALUE_NONE.format(param_name)) - - -def _fill_list_of(xmldoc, element_type, xml_element_name): - xmlelements = _get_child_nodes(xmldoc, xml_element_name) - return [_parse_response_body_from_xml_node(xmlelement, element_type) \ - for xmlelement in xmlelements] - - -def _fill_scalar_list_of(xmldoc, element_type, parent_xml_element_name, - xml_element_name): - '''Converts an xml fragment into a list of scalar types. The parent xml - element contains a flat list of xml elements which are converted into the - specified scalar type and added to the list. - Example: - xmldoc= -<Endpoints> - <Endpoint>http://{storage-service-name}.blob.core.windows.net/</Endpoint> - <Endpoint>http://{storage-service-name}.queue.core.windows.net/</Endpoint> - <Endpoint>http://{storage-service-name}.table.core.windows.net/</Endpoint> -</Endpoints> - element_type=str - parent_xml_element_name='Endpoints' - xml_element_name='Endpoint' - ''' - xmlelements = _get_child_nodes(xmldoc, parent_xml_element_name) - if xmlelements: - xmlelements = _get_child_nodes(xmlelements[0], xml_element_name) - return [_get_node_value(xmlelement, element_type) \ - for xmlelement in xmlelements] - - -def _fill_dict(xmldoc, element_name): - xmlelements = _get_child_nodes(xmldoc, element_name) - if xmlelements: - return_obj = {} - for child in xmlelements[0].childNodes: - if child.firstChild: - return_obj[child.nodeName] = child.firstChild.nodeValue - return return_obj - - -def _fill_dict_of(xmldoc, parent_xml_element_name, pair_xml_element_name, - key_xml_element_name, value_xml_element_name): - '''Converts an xml fragment into a dictionary. The parent xml element - contains a list of xml elements where each element has a child element for - the key, and another for the value. 
- Example: - xmldoc= -<ExtendedProperties> - <ExtendedProperty> - <Name>Ext1</Name> - <Value>Val1</Value> - </ExtendedProperty> - <ExtendedProperty> - <Name>Ext2</Name> - <Value>Val2</Value> - </ExtendedProperty> -</ExtendedProperties> - element_type=str - parent_xml_element_name='ExtendedProperties' - pair_xml_element_name='ExtendedProperty' - key_xml_element_name='Name' - value_xml_element_name='Value' - ''' - return_obj = {} - - xmlelements = _get_child_nodes(xmldoc, parent_xml_element_name) - if xmlelements: - xmlelements = _get_child_nodes(xmlelements[0], pair_xml_element_name) - for pair in xmlelements: - keys = _get_child_nodes(pair, key_xml_element_name) - values = _get_child_nodes(pair, value_xml_element_name) - if keys and values: - key = keys[0].firstChild.nodeValue - value = values[0].firstChild.nodeValue - return_obj[key] = value - - return return_obj - - -def _fill_instance_child(xmldoc, element_name, return_type): - '''Converts a child of the current dom element to the specified type. - ''' - xmlelements = _get_child_nodes( - xmldoc, _get_serialization_name(element_name)) - - if not xmlelements: - return None - - return_obj = return_type() - _fill_data_to_return_object(xmlelements[0], return_obj) - - return return_obj - - -def _fill_instance_element(element, return_type): - """Converts a DOM element into the specified object""" - return _parse_response_body_from_xml_node(element, return_type) - - -def _fill_data_minidom(xmldoc, element_name, data_member): - xmlelements = _get_child_nodes( - xmldoc, _get_serialization_name(element_name)) - - if not xmlelements or not xmlelements[0].childNodes: - return None - - value = xmlelements[0].firstChild.nodeValue - - if data_member is None: - return value - elif isinstance(data_member, datetime): - return _to_datetime(value) - elif type(data_member) is bool: - return value.lower() != 'false' - else: - return type(data_member)(value) - - -def _get_node_value(xmlelement, data_type): - value = xmlelement.firstChild.nodeValue - if data_type is datetime: - return _to_datetime(value) - elif data_type is bool: - return value.lower() != 'false' - else: - return data_type(value) - - -def _get_request_body_bytes_only(param_name, param_value): - '''Validates the request body passed in and converts it to bytes - if our policy allows it.''' - if param_value is None: - return b'' - - if isinstance(param_value, bytes): - return param_value - - # Previous versions of the SDK allowed data types other than bytes to be - # passed in, and they would be auto-converted to bytes. We preserve this - # behavior when running under 2.7, but issue a warning. - # Python 3 support is new, so we reject anything that's not bytes. - if sys.version_info < (3,): - warnings.warn(_WARNING_VALUE_SHOULD_BE_BYTES.format(param_name)) - return _get_request_body(param_value) - - raise TypeError(_ERROR_VALUE_SHOULD_BE_BYTES.format(param_name)) - - -def _get_request_body(request_body): - '''Converts an object into a request body. If it's None - we'll return an empty string, if it's one of our objects it'll - convert it to XML and return it. 
Otherwise we just use the object - directly''' - if request_body is None: - return b'' - - if isinstance(request_body, WindowsAzureData): - request_body = _convert_class_to_xml(request_body) - - if isinstance(request_body, bytes): - return request_body - - if isinstance(request_body, _unicode_type): - return request_body.encode('utf-8') - - request_body = str(request_body) - if isinstance(request_body, _unicode_type): - return request_body.encode('utf-8') - - return request_body - - -def _parse_enum_results_list(response, return_type, resp_type, item_type): - """resp_body is the XML we received -resp_type is a string, such as Containers, -return_type is the type we're constructing, such as ContainerEnumResults -item_type is the type object of the item to be created, such as Container - -This function then returns a ContainerEnumResults object with the -containers member populated with the results. -""" - - # parsing something like: - # <EnumerationResults ... > - # <Queues> - # <Queue> - # <Something /> - # <SomethingElse /> - # </Queue> - # </Queues> - # </EnumerationResults> - respbody = response.body - return_obj = return_type() - doc = minidom.parseString(respbody) - - items = [] - for enum_results in _get_child_nodes(doc, 'EnumerationResults'): - # path is something like Queues, Queue - for child in _get_children_from_path(enum_results, - resp_type, - resp_type[:-1]): - items.append(_fill_instance_element(child, item_type)) - - for name, value in vars(return_obj).items(): - # queues, Queues, this is the list its self which we populated - # above - if name == resp_type.lower(): - # the list its self. - continue - value = _fill_data_minidom(enum_results, name, value) - if value is not None: - setattr(return_obj, name, value) - - setattr(return_obj, resp_type.lower(), items) - return return_obj - - -def _parse_simple_list(response, type, item_type, list_name): - respbody = response.body - res = type() - res_items = [] - doc = minidom.parseString(respbody) - type_name = type.__name__ - item_name = item_type.__name__ - for item in _get_children_from_path(doc, type_name, item_name): - res_items.append(_fill_instance_element(item, item_type)) - - setattr(res, list_name, res_items) - return res - - -def _parse_response(response, return_type): - ''' - Parse the HTTPResponse's body and fill all the data into a class of - return_type. 
- ''' - return _parse_response_body_from_xml_text(response.body, return_type) - - -def _fill_data_to_return_object(node, return_obj): - members = dict(vars(return_obj)) - for name, value in members.items(): - if isinstance(value, _list_of): - setattr(return_obj, - name, - _fill_list_of(node, - value.list_type, - value.xml_element_name)) - elif isinstance(value, _scalar_list_of): - setattr(return_obj, - name, - _fill_scalar_list_of(node, - value.list_type, - _get_serialization_name(name), - value.xml_element_name)) - elif isinstance(value, _dict_of): - setattr(return_obj, - name, - _fill_dict_of(node, - _get_serialization_name(name), - value.pair_xml_element_name, - value.key_xml_element_name, - value.value_xml_element_name)) - elif isinstance(value, WindowsAzureData): - setattr(return_obj, - name, - _fill_instance_child(node, name, value.__class__)) - elif isinstance(value, dict): - setattr(return_obj, - name, - _fill_dict(node, _get_serialization_name(name))) - elif isinstance(value, _Base64String): - value = _fill_data_minidom(node, name, '') - if value is not None: - value = _decode_base64_to_text(value) - # always set the attribute, so we don't end up returning an object - # with type _Base64String - setattr(return_obj, name, value) - else: - value = _fill_data_minidom(node, name, value) - if value is not None: - setattr(return_obj, name, value) - - -def _parse_response_body_from_xml_node(node, return_type): - ''' - parse the xml and fill all the data into a class of return_type - ''' - return_obj = return_type() - _fill_data_to_return_object(node, return_obj) - - return return_obj - - -def _parse_response_body_from_xml_text(respbody, return_type): - ''' - parse the xml and fill all the data into a class of return_type - ''' - doc = minidom.parseString(respbody) - return_obj = return_type() - for node in _get_child_nodes(doc, return_type.__name__): - _fill_data_to_return_object(node, return_obj) - - return return_obj - - -class _dict_of(dict): - - """a dict which carries with it the xml element names for key,val. - Used for deserializaion and construction of the lists""" - - def __init__(self, pair_xml_element_name, key_xml_element_name, - value_xml_element_name): - self.pair_xml_element_name = pair_xml_element_name - self.key_xml_element_name = key_xml_element_name - self.value_xml_element_name = value_xml_element_name - super(_dict_of, self).__init__() - - -class _list_of(list): - - """a list which carries with it the type that's expected to go in it. - Used for deserializaion and construction of the lists""" - - def __init__(self, list_type, xml_element_name=None): - self.list_type = list_type - if xml_element_name is None: - self.xml_element_name = list_type.__name__ - else: - self.xml_element_name = xml_element_name - super(_list_of, self).__init__() - - -class _scalar_list_of(list): - - """a list of scalar types which carries with it the type that's - expected to go in it along with its xml element name. 
- Used for deserializaion and construction of the lists""" - - def __init__(self, list_type, xml_element_name): - self.list_type = list_type - self.xml_element_name = xml_element_name - super(_scalar_list_of, self).__init__() - - -def _update_request_uri_query_local_storage(request, use_local_storage): - ''' create correct uri and query for the request ''' - uri, query = _update_request_uri_query(request) - if use_local_storage: - return '/' + DEV_ACCOUNT_NAME + uri, query - return uri, query - - -def _update_request_uri_query(request): - '''pulls the query string out of the URI and moves it into - the query portion of the request object. If there are already - query parameters on the request the parameters in the URI will - appear after the existing parameters''' - - if '?' in request.path: - request.path, _, query_string = request.path.partition('?') - if query_string: - query_params = query_string.split('&') - for query in query_params: - if '=' in query: - name, _, value = query.partition('=') - request.query.append((name, value)) - - request.path = url_quote(request.path, '/()$=\',') - - # add encoded queries to request.path. - if request.query: - request.path += '?' - for name, value in request.query: - if value is not None: - request.path += name + '=' + url_quote(value, '/()$=\',') + '&' - request.path = request.path[:-1] - - return request.path, request.query - - -def _dont_fail_on_exist(error): - ''' don't throw exception if the resource exists. - This is called by create_* APIs with fail_on_exist=False''' - if isinstance(error, WindowsAzureConflictError): - return False - else: - raise error - - -def _dont_fail_not_exist(error): - ''' don't throw exception if the resource doesn't exist. - This is called by create_* APIs with fail_on_exist=False''' - if isinstance(error, WindowsAzureMissingResourceError): - return False - else: - raise error - - -def _general_error_handler(http_error): - ''' Simple error handler for azure.''' - if http_error.status == 409: - raise WindowsAzureConflictError( - _ERROR_CONFLICT.format(str(http_error))) - elif http_error.status == 404: - raise WindowsAzureMissingResourceError( - _ERROR_NOT_FOUND.format(str(http_error))) - else: - if http_error.respbody is not None: - raise WindowsAzureError( - _ERROR_UNKNOWN.format(str(http_error)) + '\n' + \ - http_error.respbody.decode('utf-8')) - else: - raise WindowsAzureError(_ERROR_UNKNOWN.format(str(http_error))) - - -def _parse_response_for_dict(response): - ''' Extracts name-values from response header. Filter out the standard - http headers.''' - - if response is None: - return None - http_headers = ['server', 'date', 'location', 'host', - 'via', 'proxy-connection', 'connection'] - return_dict = HeaderDict() - if response.headers: - for name, value in response.headers: - if not name.lower() in http_headers: - return_dict[name] = value - - return return_dict - - -def _parse_response_for_dict_prefix(response, prefixes): - ''' Extracts name-values for names starting with prefix from response - header. Filter out the standard http headers.''' - - if response is None: - return None - return_dict = {} - orig_dict = _parse_response_for_dict(response) - if orig_dict: - for name, value in orig_dict.items(): - for prefix_value in prefixes: - if name.lower().startswith(prefix_value.lower()): - return_dict[name] = value - break - return return_dict - else: - return None - - -def _parse_response_for_dict_filter(response, filter): - ''' Extracts name-values for names in filter from response header. 
Filter - out the standard http headers.''' - if response is None: - return None - return_dict = {} - orig_dict = _parse_response_for_dict(response) - if orig_dict: - for name, value in orig_dict.items(): - if name.lower() in filter: - return_dict[name] = value - return return_dict - else: - return None +#------------------------------------------------------------------------- +# Copyright (c) Microsoft. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#-------------------------------------------------------------------------- +import ast +import base64 +import hashlib +import hmac +import sys +import types +import warnings +if sys.version_info < (3,): + from urllib2 import quote as url_quote + from urllib2 import unquote as url_unquote + _strtype = basestring +else: + from urllib.parse import quote as url_quote + from urllib.parse import unquote as url_unquote + _strtype = str + +from datetime import datetime +from xml.dom import minidom +from xml.sax.saxutils import escape as xml_escape + +#-------------------------------------------------------------------------- +# constants + +__author__ = 'Microsoft Corp. <ptvshelp@microsoft.com>' +__version__ = '0.9.0' + +# Live ServiceClient URLs +BLOB_SERVICE_HOST_BASE = '.blob.core.windows.net' +QUEUE_SERVICE_HOST_BASE = '.queue.core.windows.net' +TABLE_SERVICE_HOST_BASE = '.table.core.windows.net' +SERVICE_BUS_HOST_BASE = '.servicebus.windows.net' +MANAGEMENT_HOST = 'management.core.windows.net' + +# Development ServiceClient URLs +DEV_BLOB_HOST = '127.0.0.1:10000' +DEV_QUEUE_HOST = '127.0.0.1:10001' +DEV_TABLE_HOST = '127.0.0.1:10002' + +# Default credentials for Development Storage Service +DEV_ACCOUNT_NAME = 'devstoreaccount1' +DEV_ACCOUNT_KEY = 'Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==' + +# All of our error messages +_ERROR_CANNOT_FIND_PARTITION_KEY = 'Cannot find partition key in request.' +_ERROR_CANNOT_FIND_ROW_KEY = 'Cannot find row key in request.' +_ERROR_INCORRECT_TABLE_IN_BATCH = \ + 'Table should be the same in a batch operations' +_ERROR_INCORRECT_PARTITION_KEY_IN_BATCH = \ + 'Partition Key should be the same in a batch operations' +_ERROR_DUPLICATE_ROW_KEY_IN_BATCH = \ + 'Row Keys should not be the same in a batch operations' +_ERROR_BATCH_COMMIT_FAIL = 'Batch Commit Fail' +_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_DELETE = \ + 'Message is not peek locked and cannot be deleted.' +_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_UNLOCK = \ + 'Message is not peek locked and cannot be unlocked.' 
+_ERROR_QUEUE_NOT_FOUND = 'Queue was not found' +_ERROR_TOPIC_NOT_FOUND = 'Topic was not found' +_ERROR_CONFLICT = 'Conflict ({0})' +_ERROR_NOT_FOUND = 'Not found ({0})' +_ERROR_UNKNOWN = 'Unknown error ({0})' +_ERROR_SERVICEBUS_MISSING_INFO = \ + 'You need to provide servicebus namespace, access key and Issuer' +_ERROR_STORAGE_MISSING_INFO = \ + 'You need to provide both account name and access key' +_ERROR_ACCESS_POLICY = \ + 'share_access_policy must be either SignedIdentifier or AccessPolicy ' + \ + 'instance' +_WARNING_VALUE_SHOULD_BE_BYTES = \ + 'Warning: {0} must be bytes data type. It will be converted ' + \ + 'automatically, with utf-8 text encoding.' +_ERROR_VALUE_SHOULD_BE_BYTES = '{0} should be of type bytes.' +_ERROR_VALUE_NONE = '{0} should not be None.' +_ERROR_VALUE_NEGATIVE = '{0} should not be negative.' +_ERROR_CANNOT_SERIALIZE_VALUE_TO_ENTITY = \ + 'Cannot serialize the specified value ({0}) to an entity. Please use ' + \ + 'an EntityProperty (which can specify custom types), int, str, bool, ' + \ + 'or datetime.' +_ERROR_PAGE_BLOB_SIZE_ALIGNMENT = \ + 'Invalid page blob size: {0}. ' + \ + 'The size must be aligned to a 512-byte boundary.' + +_USER_AGENT_STRING = 'pyazure/' + __version__ + +METADATA_NS = 'http://schemas.microsoft.com/ado/2007/08/dataservices/metadata' + + +class WindowsAzureData(object): + + ''' This is the base of data class. + It is only used to check whether it is instance or not. ''' + pass + +class WindowsAzureError(Exception): + + ''' WindowsAzure Exception base class. ''' + + def __init__(self, message): + super(WindowsAzureError, self).__init__(message) + + +class WindowsAzureConflictError(WindowsAzureError): + + '''Indicates that the resource could not be created because it already + exists''' + + def __init__(self, message): + super(WindowsAzureConflictError, self).__init__(message) + + +class WindowsAzureMissingResourceError(WindowsAzureError): + + '''Indicates that a request for a request for a resource (queue, table, + container, etc...) failed because the specified resource does not exist''' + + def __init__(self, message): + super(WindowsAzureMissingResourceError, self).__init__(message) + + +class WindowsAzureBatchOperationError(WindowsAzureError): + + '''Indicates that a batch operation failed''' + + def __init__(self, message, code): + super(WindowsAzureBatchOperationError, self).__init__(message) + self.code = code + + +class Feed(object): + pass + + +class _Base64String(str): + pass + + +class HeaderDict(dict): + + def __getitem__(self, index): + return super(HeaderDict, self).__getitem__(index.lower()) + + +def _encode_base64(data): + if isinstance(data, _unicode_type): + data = data.encode('utf-8') + encoded = base64.b64encode(data) + return encoded.decode('utf-8') + + +def _decode_base64_to_bytes(data): + if isinstance(data, _unicode_type): + data = data.encode('utf-8') + return base64.b64decode(data) + + +def _decode_base64_to_text(data): + decoded_bytes = _decode_base64_to_bytes(data) + return decoded_bytes.decode('utf-8') + + +def _get_readable_id(id_name, id_prefix_to_skip): + """simplified an id to be more friendly for us people""" + # id_name is in the form 'https://namespace.host.suffix/name' + # where name may contain a forward slash! 
+ pos = id_name.find('//') + if pos != -1: + pos += 2 + if id_prefix_to_skip: + pos = id_name.find(id_prefix_to_skip, pos) + if pos != -1: + pos += len(id_prefix_to_skip) + pos = id_name.find('/', pos) + if pos != -1: + return id_name[pos + 1:] + return id_name + + +def _get_entry_properties_from_node(entry, include_id, id_prefix_to_skip=None, use_title_as_id=False): + ''' get properties from entry xml ''' + properties = {} + + etag = entry.getAttributeNS(METADATA_NS, 'etag') + if etag: + properties['etag'] = etag + for updated in _get_child_nodes(entry, 'updated'): + properties['updated'] = updated.firstChild.nodeValue + for name in _get_children_from_path(entry, 'author', 'name'): + if name.firstChild is not None: + properties['author'] = name.firstChild.nodeValue + + if include_id: + if use_title_as_id: + for title in _get_child_nodes(entry, 'title'): + properties['name'] = title.firstChild.nodeValue + else: + for id in _get_child_nodes(entry, 'id'): + properties['name'] = _get_readable_id( + id.firstChild.nodeValue, id_prefix_to_skip) + + return properties + + +def _get_entry_properties(xmlstr, include_id, id_prefix_to_skip=None): + ''' get properties from entry xml ''' + xmldoc = minidom.parseString(xmlstr) + properties = {} + + for entry in _get_child_nodes(xmldoc, 'entry'): + properties.update(_get_entry_properties_from_node(entry, include_id, id_prefix_to_skip)) + + return properties + + +def _get_first_child_node_value(parent_node, node_name): + xml_attrs = _get_child_nodes(parent_node, node_name) + if xml_attrs: + xml_attr = xml_attrs[0] + if xml_attr.firstChild: + value = xml_attr.firstChild.nodeValue + return value + + +def _get_child_nodes(node, tagName): + return [childNode for childNode in node.getElementsByTagName(tagName) + if childNode.parentNode == node] + + +def _get_children_from_path(node, *path): + '''descends through a hierarchy of nodes returning the list of children + at the inner most level. Only returns children who share a common parent, + not cousins.''' + cur = node + for index, child in enumerate(path): + if isinstance(child, _strtype): + next = _get_child_nodes(cur, child) + else: + next = _get_child_nodesNS(cur, *child) + if index == len(path) - 1: + return next + elif not next: + break + + cur = next[0] + return [] + + +def _get_child_nodesNS(node, ns, tagName): + return [childNode for childNode in node.getElementsByTagNameNS(ns, tagName) + if childNode.parentNode == node] + + +def _create_entry(entry_body): + ''' Adds common part of entry to a given entry body and return the whole + xml. 
''' + updated_str = datetime.utcnow().isoformat() + if datetime.utcnow().utcoffset() is None: + updated_str += '+00:00' + + entry_start = '''<?xml version="1.0" encoding="utf-8" standalone="yes"?> +<entry xmlns:d="http://schemas.microsoft.com/ado/2007/08/dataservices" xmlns:m="http://schemas.microsoft.com/ado/2007/08/dataservices/metadata" xmlns="http://www.w3.org/2005/Atom" > +<title /><updated>{updated}</updated><author><name /></author><id /> +<content type="application/xml"> + {body}</content></entry>''' + return entry_start.format(updated=updated_str, body=entry_body) + + +def _to_datetime(strtime): + return datetime.strptime(strtime, "%Y-%m-%dT%H:%M:%S.%f") + +_KNOWN_SERIALIZATION_XFORMS = { + 'include_apis': 'IncludeAPIs', + 'message_id': 'MessageId', + 'content_md5': 'Content-MD5', + 'last_modified': 'Last-Modified', + 'cache_control': 'Cache-Control', + 'account_admin_live_email_id': 'AccountAdminLiveEmailId', + 'service_admin_live_email_id': 'ServiceAdminLiveEmailId', + 'subscription_id': 'SubscriptionID', + 'fqdn': 'FQDN', + 'private_id': 'PrivateID', + 'os_virtual_hard_disk': 'OSVirtualHardDisk', + 'logical_disk_size_in_gb': 'LogicalDiskSizeInGB', + 'logical_size_in_gb': 'LogicalSizeInGB', + 'os': 'OS', + 'persistent_vm_downtime_info': 'PersistentVMDowntimeInfo', + 'copy_id': 'CopyId', + 'os_state': 'OSState', + 'vm_image': 'VMImage', + 'vm_images': 'VMImages', + 'os_disk_configuration': 'OSDiskConfiguration', + 'public_ips': 'PublicIPs', + 'public_ip': 'PublicIP', + 'supported_os': 'SupportedOS', + 'reserved_ip': 'ReservedIP', + 'reserved_ips': 'ReservedIPs', + 'aad_tenant_id': 'AADTenantID', + 'start_ip_address': 'StartIPAddress', + 'end_ip_address': 'EndIPAddress', + } + + +def _get_serialization_name(element_name): + """converts a Python name into a serializable name""" + known = _KNOWN_SERIALIZATION_XFORMS.get(element_name) + if known is not None: + return known + + if element_name.startswith('x_ms_'): + return element_name.replace('_', '-') + if element_name.endswith('_id'): + element_name = element_name.replace('_id', 'ID') + for name in ['content_', 'last_modified', 'if_', 'cache_control']: + if element_name.startswith(name): + element_name = element_name.replace('_', '-_') + + return ''.join(name.capitalize() for name in element_name.split('_')) + +if sys.version_info < (3,): + _unicode_type = unicode + + def _str(value): + if isinstance(value, unicode): + return value.encode('utf-8') + + return str(value) +else: + _str = str + _unicode_type = str + + +def _str_or_none(value): + if value is None: + return None + + return _str(value) + + +def _int_or_none(value): + if value is None: + return None + + return str(int(value)) + + +def _bool_or_none(value): + if value is None: + return None + + if isinstance(value, bool): + if value: + return 'true' + else: + return 'false' + + return str(value) + + +def _convert_class_to_xml(source, xml_prefix=True): + if source is None: + return '' + + xmlstr = '' + if xml_prefix: + xmlstr = '<?xml version="1.0" encoding="utf-8"?>' + + if isinstance(source, list): + for value in source: + xmlstr += _convert_class_to_xml(value, False) + elif isinstance(source, WindowsAzureData): + class_name = source.__class__.__name__ + xmlstr += '<' + class_name + '>' + for name, value in vars(source).items(): + if value is not None: + if isinstance(value, list) or \ + isinstance(value, WindowsAzureData): + xmlstr += _convert_class_to_xml(value, False) + else: + xmlstr += ('<' + _get_serialization_name(name) + '>' + + xml_escape(str(value)) + '</' + + 
_get_serialization_name(name) + '>') + xmlstr += '</' + class_name + '>' + return xmlstr + + +def _find_namespaces_from_child(parent, child, namespaces): + """Recursively searches from the parent to the child, + gathering all the applicable namespaces along the way""" + for cur_child in parent.childNodes: + if cur_child is child: + return True + if _find_namespaces_from_child(cur_child, child, namespaces): + # we are the parent node + for key in cur_child.attributes.keys(): + if key.startswith('xmlns:') or key == 'xmlns': + namespaces[key] = cur_child.attributes[key] + break + return False + + +def _find_namespaces(parent, child): + res = {} + for key in parent.documentElement.attributes.keys(): + if key.startswith('xmlns:') or key == 'xmlns': + res[key] = parent.documentElement.attributes[key] + _find_namespaces_from_child(parent, child, res) + return res + + +def _clone_node_with_namespaces(node_to_clone, original_doc): + clone = node_to_clone.cloneNode(True) + + for key, value in _find_namespaces(original_doc, node_to_clone).items(): + clone.attributes[key] = value + + return clone + + +def _convert_response_to_feeds(response, convert_func): + if response is None: + return None + + feeds = _list_of(Feed) + + x_ms_continuation = HeaderDict() + for name, value in response.headers: + if 'x-ms-continuation' in name: + x_ms_continuation[name[len('x-ms-continuation') + 1:]] = value + if x_ms_continuation: + setattr(feeds, 'x_ms_continuation', x_ms_continuation) + + xmldoc = minidom.parseString(response.body) + xml_entries = _get_children_from_path(xmldoc, 'feed', 'entry') + if not xml_entries: + # in some cases, response contains only entry but no feed + xml_entries = _get_children_from_path(xmldoc, 'entry') + for xml_entry in xml_entries: + new_node = _clone_node_with_namespaces(xml_entry, xmldoc) + feeds.append(convert_func(new_node.toxml('utf-8'))) + + return feeds + + +def _convert_xml_to_windows_azure_object(xmlstr, azure_type, include_id=True, use_title_as_id=True): + xmldoc = minidom.parseString(xmlstr) + return_obj = azure_type() + xml_name = azure_type._xml_name if hasattr(azure_type, '_xml_name') else azure_type.__name__ + + # Only one entry here + for xml_entry in _get_children_from_path(xmldoc, + 'entry'): + for node in _get_children_from_path(xml_entry, + 'content', + xml_name): + _fill_data_to_return_object(node, return_obj) + for name, value in _get_entry_properties_from_node(xml_entry, + include_id=include_id, + use_title_as_id=use_title_as_id).items(): + setattr(return_obj, name, value) + return return_obj + + +def _validate_type_bytes(param_name, param): + if not isinstance(param, bytes): + raise TypeError(_ERROR_VALUE_SHOULD_BE_BYTES.format(param_name)) + + +def _validate_not_none(param_name, param): + if param is None: + raise TypeError(_ERROR_VALUE_NONE.format(param_name)) + + +def _fill_list_of(xmldoc, element_type, xml_element_name): + xmlelements = _get_child_nodes(xmldoc, xml_element_name) + return [_parse_response_body_from_xml_node(xmlelement, element_type) \ + for xmlelement in xmlelements] + + +def _fill_scalar_list_of(xmldoc, element_type, parent_xml_element_name, + xml_element_name): + '''Converts an xml fragment into a list of scalar types. The parent xml + element contains a flat list of xml elements which are converted into the + specified scalar type and added to the list. 
+ Example: + xmldoc= +<Endpoints> + <Endpoint>http://{storage-service-name}.blob.core.windows.net/</Endpoint> + <Endpoint>http://{storage-service-name}.queue.core.windows.net/</Endpoint> + <Endpoint>http://{storage-service-name}.table.core.windows.net/</Endpoint> +</Endpoints> + element_type=str + parent_xml_element_name='Endpoints' + xml_element_name='Endpoint' + ''' + xmlelements = _get_child_nodes(xmldoc, parent_xml_element_name) + if xmlelements: + xmlelements = _get_child_nodes(xmlelements[0], xml_element_name) + return [_get_node_value(xmlelement, element_type) \ + for xmlelement in xmlelements] + + +def _fill_dict(xmldoc, element_name): + xmlelements = _get_child_nodes(xmldoc, element_name) + if xmlelements: + return_obj = {} + for child in xmlelements[0].childNodes: + if child.firstChild: + return_obj[child.nodeName] = child.firstChild.nodeValue + return return_obj + + +def _fill_dict_of(xmldoc, parent_xml_element_name, pair_xml_element_name, + key_xml_element_name, value_xml_element_name): + '''Converts an xml fragment into a dictionary. The parent xml element + contains a list of xml elements where each element has a child element for + the key, and another for the value. + Example: + xmldoc= +<ExtendedProperties> + <ExtendedProperty> + <Name>Ext1</Name> + <Value>Val1</Value> + </ExtendedProperty> + <ExtendedProperty> + <Name>Ext2</Name> + <Value>Val2</Value> + </ExtendedProperty> +</ExtendedProperties> + element_type=str + parent_xml_element_name='ExtendedProperties' + pair_xml_element_name='ExtendedProperty' + key_xml_element_name='Name' + value_xml_element_name='Value' + ''' + return_obj = {} + + xmlelements = _get_child_nodes(xmldoc, parent_xml_element_name) + if xmlelements: + xmlelements = _get_child_nodes(xmlelements[0], pair_xml_element_name) + for pair in xmlelements: + keys = _get_child_nodes(pair, key_xml_element_name) + values = _get_child_nodes(pair, value_xml_element_name) + if keys and values: + key = keys[0].firstChild.nodeValue + value = values[0].firstChild.nodeValue + return_obj[key] = value + + return return_obj + + +def _fill_instance_child(xmldoc, element_name, return_type): + '''Converts a child of the current dom element to the specified type. 
+ ''' + xmlelements = _get_child_nodes( + xmldoc, _get_serialization_name(element_name)) + + if not xmlelements: + return None + + return_obj = return_type() + _fill_data_to_return_object(xmlelements[0], return_obj) + + return return_obj + + +def _fill_instance_element(element, return_type): + """Converts a DOM element into the specified object""" + return _parse_response_body_from_xml_node(element, return_type) + + +def _fill_data_minidom(xmldoc, element_name, data_member): + xmlelements = _get_child_nodes( + xmldoc, _get_serialization_name(element_name)) + + if not xmlelements or not xmlelements[0].childNodes: + return None + + value = xmlelements[0].firstChild.nodeValue + + if data_member is None: + return value + elif isinstance(data_member, datetime): + return _to_datetime(value) + elif type(data_member) is bool: + return value.lower() != 'false' + else: + return type(data_member)(value) + + +def _get_node_value(xmlelement, data_type): + value = xmlelement.firstChild.nodeValue + if data_type is datetime: + return _to_datetime(value) + elif data_type is bool: + return value.lower() != 'false' + else: + return data_type(value) + + +def _get_request_body_bytes_only(param_name, param_value): + '''Validates the request body passed in and converts it to bytes + if our policy allows it.''' + if param_value is None: + return b'' + + if isinstance(param_value, bytes): + return param_value + + # Previous versions of the SDK allowed data types other than bytes to be + # passed in, and they would be auto-converted to bytes. We preserve this + # behavior when running under 2.7, but issue a warning. + # Python 3 support is new, so we reject anything that's not bytes. + if sys.version_info < (3,): + warnings.warn(_WARNING_VALUE_SHOULD_BE_BYTES.format(param_name)) + return _get_request_body(param_value) + + raise TypeError(_ERROR_VALUE_SHOULD_BE_BYTES.format(param_name)) + + +def _get_request_body(request_body): + '''Converts an object into a request body. If it's None + we'll return an empty string, if it's one of our objects it'll + convert it to XML and return it. Otherwise we just use the object + directly''' + if request_body is None: + return b'' + + if isinstance(request_body, WindowsAzureData): + request_body = _convert_class_to_xml(request_body) + + if isinstance(request_body, bytes): + return request_body + + if isinstance(request_body, _unicode_type): + return request_body.encode('utf-8') + + request_body = str(request_body) + if isinstance(request_body, _unicode_type): + return request_body.encode('utf-8') + + return request_body + + +def _parse_enum_results_list(response, return_type, resp_type, item_type): + """resp_body is the XML we received +resp_type is a string, such as Containers, +return_type is the type we're constructing, such as ContainerEnumResults +item_type is the type object of the item to be created, such as Container + +This function then returns a ContainerEnumResults object with the +containers member populated with the results. +""" + + # parsing something like: + # <EnumerationResults ... 
> + # <Queues> + # <Queue> + # <Something /> + # <SomethingElse /> + # </Queue> + # </Queues> + # </EnumerationResults> + respbody = response.body + return_obj = return_type() + doc = minidom.parseString(respbody) + + items = [] + for enum_results in _get_child_nodes(doc, 'EnumerationResults'): + # path is something like Queues, Queue + for child in _get_children_from_path(enum_results, + resp_type, + resp_type[:-1]): + items.append(_fill_instance_element(child, item_type)) + + for name, value in vars(return_obj).items(): + # queues, Queues, this is the list its self which we populated + # above + if name == resp_type.lower(): + # the list its self. + continue + value = _fill_data_minidom(enum_results, name, value) + if value is not None: + setattr(return_obj, name, value) + + setattr(return_obj, resp_type.lower(), items) + return return_obj + + +def _parse_simple_list(response, type, item_type, list_name): + respbody = response.body + res = type() + res_items = [] + doc = minidom.parseString(respbody) + type_name = type.__name__ + item_name = item_type.__name__ + for item in _get_children_from_path(doc, type_name, item_name): + res_items.append(_fill_instance_element(item, item_type)) + + setattr(res, list_name, res_items) + return res + + +def _parse_response(response, return_type): + ''' + Parse the HTTPResponse's body and fill all the data into a class of + return_type. + ''' + return _parse_response_body_from_xml_text(response.body, return_type) + +def _parse_service_resources_response(response, return_type): + ''' + Parse the HTTPResponse's body and fill all the data into a class of + return_type. + ''' + return _parse_response_body_from_service_resources_xml_text(response.body, return_type) + + +def _fill_data_to_return_object(node, return_obj): + members = dict(vars(return_obj)) + for name, value in members.items(): + if isinstance(value, _list_of): + setattr(return_obj, + name, + _fill_list_of(node, + value.list_type, + value.xml_element_name)) + elif isinstance(value, _scalar_list_of): + setattr(return_obj, + name, + _fill_scalar_list_of(node, + value.list_type, + _get_serialization_name(name), + value.xml_element_name)) + elif isinstance(value, _dict_of): + setattr(return_obj, + name, + _fill_dict_of(node, + _get_serialization_name(name), + value.pair_xml_element_name, + value.key_xml_element_name, + value.value_xml_element_name)) + elif isinstance(value, _xml_attribute): + real_value = None + if node.hasAttribute(value.xml_element_name): + real_value = node.getAttribute(value.xml_element_name) + if real_value is not None: + setattr(return_obj, name, real_value) + elif isinstance(value, WindowsAzureData): + setattr(return_obj, + name, + _fill_instance_child(node, name, value.__class__)) + elif isinstance(value, dict): + setattr(return_obj, + name, + _fill_dict(node, _get_serialization_name(name))) + elif isinstance(value, _Base64String): + value = _fill_data_minidom(node, name, '') + if value is not None: + value = _decode_base64_to_text(value) + # always set the attribute, so we don't end up returning an object + # with type _Base64String + setattr(return_obj, name, value) + else: + value = _fill_data_minidom(node, name, value) + if value is not None: + setattr(return_obj, name, value) + + +def _parse_response_body_from_xml_node(node, return_type): + ''' + parse the xml and fill all the data into a class of return_type + ''' + return_obj = return_type() + _fill_data_to_return_object(node, return_obj) + + return return_obj + + +def _parse_response_body_from_xml_text(respbody, 
return_type): + ''' + parse the xml and fill all the data into a class of return_type + ''' + doc = minidom.parseString(respbody) + return_obj = return_type() + xml_name = return_type._xml_name if hasattr(return_type, '_xml_name') else return_type.__name__ + for node in _get_child_nodes(doc, xml_name): + _fill_data_to_return_object(node, return_obj) + + return return_obj + +def _parse_response_body_from_service_resources_xml_text(respbody, return_type): + ''' + parse the xml and fill all the data into a class of return_type + ''' + doc = minidom.parseString(respbody) + return_obj = _list_of(return_type) + for node in _get_children_from_path(doc, "ServiceResources", "ServiceResource"): + local_obj = return_type() + _fill_data_to_return_object(node, local_obj) + return_obj.append(local_obj) + + return return_obj + +class _dict_of(dict): + + """a dict which carries with it the xml element names for key,val. + Used for deserializaion and construction of the lists""" + + def __init__(self, pair_xml_element_name, key_xml_element_name, + value_xml_element_name): + self.pair_xml_element_name = pair_xml_element_name + self.key_xml_element_name = key_xml_element_name + self.value_xml_element_name = value_xml_element_name + super(_dict_of, self).__init__() + + +class _list_of(list): + + """a list which carries with it the type that's expected to go in it. + Used for deserializaion and construction of the lists""" + + def __init__(self, list_type, xml_element_name=None): + self.list_type = list_type + if xml_element_name is None: + self.xml_element_name = list_type.__name__ + else: + self.xml_element_name = xml_element_name + super(_list_of, self).__init__() + + +class _scalar_list_of(list): + + """a list of scalar types which carries with it the type that's + expected to go in it along with its xml element name. + Used for deserializaion and construction of the lists""" + + def __init__(self, list_type, xml_element_name): + self.list_type = list_type + self.xml_element_name = xml_element_name + super(_scalar_list_of, self).__init__() + +class _xml_attribute: + + """a accessor to XML attributes + expected to go in it along with its xml element name. + Used for deserialization and construction""" + + def __init__(self, xml_element_name): + self.xml_element_name = xml_element_name + + +def _update_request_uri_query_local_storage(request, use_local_storage): + ''' create correct uri and query for the request ''' + uri, query = _update_request_uri_query(request) + if use_local_storage: + return '/' + DEV_ACCOUNT_NAME + uri, query + return uri, query + + +def _update_request_uri_query(request): + '''pulls the query string out of the URI and moves it into + the query portion of the request object. If there are already + query parameters on the request the parameters in the URI will + appear after the existing parameters''' + + if '?' in request.path: + request.path, _, query_string = request.path.partition('?') + if query_string: + query_params = query_string.split('&') + for query in query_params: + if '=' in query: + name, _, value = query.partition('=') + request.query.append((name, value)) + + request.path = url_quote(request.path, '/()$=\',') + + # add encoded queries to request.path. + if request.query: + request.path += '?' 
+
+
+def _dont_fail_on_exist(error):
+    ''' Don't throw an exception if the resource exists.
+    This is called by create_* APIs with fail_on_exist=False. '''
+    if isinstance(error, WindowsAzureConflictError):
+        return False
+    else:
+        raise error
+
+
+def _dont_fail_not_exist(error):
+    ''' Don't throw an exception if the resource doesn't exist.
+    This is called by delete_* APIs with fail_not_exist=False. '''
+    if isinstance(error, WindowsAzureMissingResourceError):
+        return False
+    else:
+        raise error
+
+
+def _general_error_handler(http_error):
+    ''' Simple error handler for azure. '''
+    if http_error.status == 409:
+        raise WindowsAzureConflictError(
+            _ERROR_CONFLICT.format(str(http_error)))
+    elif http_error.status == 404:
+        raise WindowsAzureMissingResourceError(
+            _ERROR_NOT_FOUND.format(str(http_error)))
+    else:
+        if http_error.respbody is not None:
+            raise WindowsAzureError(
+                _ERROR_UNKNOWN.format(str(http_error)) + '\n' + \
+                http_error.respbody.decode('utf-8'))
+        else:
+            raise WindowsAzureError(_ERROR_UNKNOWN.format(str(http_error)))
+
+
+def _parse_response_for_dict(response):
+    ''' Extracts name-value pairs from the response headers, filtering out
+    the standard http headers. '''
+
+    if response is None:
+        return None
+    http_headers = ['server', 'date', 'location', 'host',
+                    'via', 'proxy-connection', 'connection']
+    return_dict = HeaderDict()
+    if response.headers:
+        for name, value in response.headers:
+            if not name.lower() in http_headers:
+                return_dict[name] = value
+
+    return return_dict
+
+
+def _parse_response_for_dict_prefix(response, prefixes):
+    ''' Extracts name-value pairs whose names start with one of the given
+    prefixes from the response headers, filtering out the standard http
+    headers. '''
+
+    if response is None:
+        return None
+    return_dict = {}
+    orig_dict = _parse_response_for_dict(response)
+    if orig_dict:
+        for name, value in orig_dict.items():
+            for prefix_value in prefixes:
+                if name.lower().startswith(prefix_value.lower()):
+                    return_dict[name] = value
+                    break
+        return return_dict
+    else:
+        return None
+
+
+def _parse_response_for_dict_filter(response, filter):
+    ''' Extracts name-value pairs whose names appear in filter from the
+    response headers, filtering out the standard http headers. '''
+    if response is None:
+        return None
+    return_dict = {}
+    orig_dict = _parse_response_for_dict(response)
+    if orig_dict:
+        for name, value in orig_dict.items():
+            if name.lower() in filter:
+                return_dict[name] = value
+        return return_dict
+    else:
+        return None
+
+
+def _sign_string(key, string_to_sign, key_is_base64=True):
+    if key_is_base64:
+        key = _decode_base64_to_bytes(key)
+    else:
+        if isinstance(key, _unicode_type):
+            key = key.encode('utf-8')
+    if isinstance(string_to_sign, _unicode_type):
+        string_to_sign = string_to_sign.encode('utf-8')
+    signed_hmac_sha256 = hmac.HMAC(key, string_to_sign, hashlib.sha256)
+    digest = signed_hmac_sha256.digest()
+    encoded_digest = _encode_base64(digest)
+    return encoded_digest
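_sign_string is the primitive behind the storage Authorization headers: base64-decode the account key, HMAC-SHA256 the canonical string-to-sign, then base64-encode the digest. The same computation using only the standard library (the key below is a throwaway value for illustration, not a real credential):

    import base64
    import hashlib
    import hmac

    def sign_string(key_b64, string_to_sign):
        # base64-decode the key, HMAC-SHA256 the payload, base64 the digest.
        key = base64.b64decode(key_b64)
        if not isinstance(string_to_sign, bytes):
            string_to_sign = string_to_sign.encode('utf-8')
        digest = hmac.new(key, string_to_sign, hashlib.sha256).digest()
        return base64.b64encode(digest).decode('utf-8')

    fake_key = base64.b64encode(b'not-a-real-account-key')
    print(sign_string(fake_key, 'GET\n\n\nx-ms-date:Fri, 06 Feb 2015 00:00:00 GMT'))

The signature is attached verbatim as the Authorization header value, so both sides must build the string-to-sign byte-for-byte identically.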
diff --git a/awx/lib/site-packages/azure/azure.pyproj b/awx/lib/site-packages/azure/azure.pyproj
new file mode 100644
index 0000000000..bb79dc0229
--- /dev/null
+++ b/awx/lib/site-packages/azure/azure.pyproj
@@ -0,0 +1,81 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="4.0">
+  <PropertyGroup>
+    <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
+    <SchemaVersion>2.0</SchemaVersion>
+    <ProjectGuid>{25b2c65a-0553-4452-8907-8b5b17544e68}</ProjectGuid>
+    <ProjectHome>
+    </ProjectHome>
+    <StartupFile>storage\blobservice.py</StartupFile>
+    <SearchPath>..</SearchPath>
+    <WorkingDirectory>.</WorkingDirectory>
+    <OutputPath>.</OutputPath>
+    <Name>azure</Name>
+    <RootNamespace>azure</RootNamespace>
+    <IsWindowsApplication>False</IsWindowsApplication>
+    <LaunchProvider>Standard Python launcher</LaunchProvider>
+    <CommandLineArguments />
+    <InterpreterPath />
+    <InterpreterArguments />
+    <InterpreterId>{2af0f10d-7135-4994-9156-5d01c9c11b7e}</InterpreterId>
+    <InterpreterVersion>2.7</InterpreterVersion>
+    <SccProjectName>SAK</SccProjectName>
+    <SccProvider>SAK</SccProvider>
+    <SccAuxPath>SAK</SccAuxPath>
+    <SccLocalPath>SAK</SccLocalPath>
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)' == 'Debug' ">
+    <DebugSymbols>true</DebugSymbols>
+    <EnableUnmanagedDebugging>false</EnableUnmanagedDebugging>
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)' == 'Release' ">
+    <DebugSymbols>true</DebugSymbols>
+    <EnableUnmanagedDebugging>false</EnableUnmanagedDebugging>
+  </PropertyGroup>
+  <ItemGroup>
+    <Compile Include="http\batchclient.py" />
+    <Compile Include="http\httpclient.py" />
+    <Compile Include="http\requestsclient.py" />
+    <Compile Include="http\winhttp.py" />
+    <Compile Include="http\__init__.py" />
+    <Compile Include="servicemanagement\schedulermanagementservice.py" />
+    <Compile Include="servicemanagement\servicebusmanagementservice.py" />
+    <Compile Include="servicemanagement\servicemanagementclient.py" />
+    <Compile Include="servicemanagement\servicemanagementservice.py" />
+    <Compile Include="servicemanagement\sqldatabasemanagementservice.py" />
+    <Compile Include="servicemanagement\websitemanagementservice.py" />
+    <Compile Include="servicemanagement\__init__.py" />
+    <Compile Include="servicebus\servicebusservice.py" />
+    <Compile Include="storage\blobservice.py" />
+    <Compile Include="storage\queueservice.py" />
+    <Compile Include="storage\cloudstorageaccount.py" />
+    <Compile Include="storage\tableservice.py" />
+    <Compile Include="storage\sharedaccesssignature.py" />
+    <Compile Include="__init__.py" />
+    <Compile Include="servicebus\__init__.py" />
+    <Compile 
Include="storage\storageclient.py" /> + <Compile Include="storage\__init__.py" /> + </ItemGroup> + <ItemGroup> + <Folder Include="http" /> + <Folder Include="servicemanagement" /> + <Folder Include="servicebus" /> + <Folder Include="storage" /> + </ItemGroup> + <ItemGroup> + <InterpreterReference Include="{2af0f10d-7135-4994-9156-5d01c9c11b7e}\2.6" /> + <InterpreterReference Include="{2af0f10d-7135-4994-9156-5d01c9c11b7e}\2.7" /> + <InterpreterReference Include="{2af0f10d-7135-4994-9156-5d01c9c11b7e}\3.3" /> + <InterpreterReference Include="{2af0f10d-7135-4994-9156-5d01c9c11b7e}\3.4" /> + <InterpreterReference Include="{9a7a9026-48c1-4688-9d5d-e5699d47d074}\2.7" /> + <InterpreterReference Include="{9a7a9026-48c1-4688-9d5d-e5699d47d074}\3.3" /> + <InterpreterReference Include="{9a7a9026-48c1-4688-9d5d-e5699d47d074}\3.4" /> + </ItemGroup> + <PropertyGroup> + <VisualStudioVersion Condition="'$(VisualStudioVersion)' == ''">10.0</VisualStudioVersion> + <VSToolsPath Condition="'$(VSToolsPath)' == ''">$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)</VSToolsPath> + <PtvsTargetsFile>$(VSToolsPath)\Python Tools\Microsoft.PythonTools.targets</PtvsTargetsFile> + </PropertyGroup> + <Import Condition="Exists($(PtvsTargetsFile))" Project="$(PtvsTargetsFile)" /> + <Import Condition="!Exists($(PtvsTargetsFile))" Project="$(MSBuildToolsPath)\Microsoft.Common.targets" /> +</Project> \ No newline at end of file diff --git a/awx/lib/site-packages/azure/http/__init__.py b/awx/lib/site-packages/azure/http/__init__.py index 3bc1e258db..680d5b5ad5 100644 --- a/awx/lib/site-packages/azure/http/__init__.py +++ b/awx/lib/site-packages/azure/http/__init__.py @@ -1,73 +1,73 @@ -#------------------------------------------------------------------------- -# Copyright (c) Microsoft. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#-------------------------------------------------------------------------- - -HTTP_RESPONSE_NO_CONTENT = 204 - - -class HTTPError(Exception): - - ''' HTTP Exception when response status code >= 300 ''' - - def __init__(self, status, message, respheader, respbody): - '''Creates a new HTTPError with the specified status, message, - response headers and body''' - self.status = status - self.respheader = respheader - self.respbody = respbody - Exception.__init__(self, message) - - -class HTTPResponse(object): - - """Represents a response from an HTTP request. An HTTPResponse has the - following attributes: - - status: the status code of the response - message: the message - headers: the returned headers, as a list of (name, value) pairs - body: the body of the response - """ - - def __init__(self, status, message, headers, body): - self.status = status - self.message = message - self.headers = headers - self.body = body - - -class HTTPRequest(object): - - '''Represents an HTTP Request. An HTTP Request consists of the following - attributes: - - host: the host name to connect to - method: the method to use to connect (string such as GET, POST, PUT, etc.) 
- path: the uri fragment - query: query parameters specified as a list of (name, value) pairs - headers: header values specified as (name, value) pairs - body: the body of the request. - protocol_override: - specify to use this protocol instead of the global one stored in - _HTTPClient. - ''' - - def __init__(self): - self.host = '' - self.method = '' - self.path = '' - self.query = [] # list of (name, value) - self.headers = [] # list of (header name, header value) - self.body = '' - self.protocol_override = None +#------------------------------------------------------------------------- +# Copyright (c) Microsoft. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#-------------------------------------------------------------------------- + +HTTP_RESPONSE_NO_CONTENT = 204 + + +class HTTPError(Exception): + + ''' HTTP Exception when response status code >= 300 ''' + + def __init__(self, status, message, respheader, respbody): + '''Creates a new HTTPError with the specified status, message, + response headers and body''' + self.status = status + self.respheader = respheader + self.respbody = respbody + Exception.__init__(self, message) + + +class HTTPResponse(object): + + """Represents a response from an HTTP request. An HTTPResponse has the + following attributes: + + status: the status code of the response + message: the message + headers: the returned headers, as a list of (name, value) pairs + body: the body of the response + """ + + def __init__(self, status, message, headers, body): + self.status = status + self.message = message + self.headers = headers + self.body = body + + +class HTTPRequest(object): + + '''Represents an HTTP Request. An HTTP Request consists of the following + attributes: + + host: the host name to connect to + method: the method to use to connect (string such as GET, POST, PUT, etc.) + path: the uri fragment + query: query parameters specified as a list of (name, value) pairs + headers: header values specified as (name, value) pairs + body: the body of the request. + protocol_override: + specify to use this protocol instead of the global one stored in + _HTTPClient. + ''' + + def __init__(self): + self.host = '' + self.method = '' + self.path = '' + self.query = [] # list of (name, value) + self.headers = [] # list of (header name, header value) + self.body = '' + self.protocol_override = None diff --git a/awx/lib/site-packages/azure/http/batchclient.py b/awx/lib/site-packages/azure/http/batchclient.py index 0e6d60d3bb..9bd3223e15 100644 --- a/awx/lib/site-packages/azure/http/batchclient.py +++ b/awx/lib/site-packages/azure/http/batchclient.py @@ -1,339 +1,339 @@ -#------------------------------------------------------------------------- -# Copyright (c) Microsoft. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#-------------------------------------------------------------------------- -import sys -import uuid - -from azure import ( - _update_request_uri_query, - WindowsAzureError, - WindowsAzureBatchOperationError, - _get_children_from_path, - url_unquote, - _ERROR_CANNOT_FIND_PARTITION_KEY, - _ERROR_CANNOT_FIND_ROW_KEY, - _ERROR_INCORRECT_TABLE_IN_BATCH, - _ERROR_INCORRECT_PARTITION_KEY_IN_BATCH, - _ERROR_DUPLICATE_ROW_KEY_IN_BATCH, - _ERROR_BATCH_COMMIT_FAIL, - ) -from azure.http import HTTPError, HTTPRequest, HTTPResponse -from azure.http.httpclient import _HTTPClient -from azure.storage import ( - _update_storage_table_header, - METADATA_NS, - _sign_storage_table_request, - ) -from xml.dom import minidom - -_DATASERVICES_NS = 'http://schemas.microsoft.com/ado/2007/08/dataservices' - -if sys.version_info < (3,): - def _new_boundary(): - return str(uuid.uuid1()) -else: - def _new_boundary(): - return str(uuid.uuid1()).encode('utf-8') - - -class _BatchClient(_HTTPClient): - - ''' - This is the class that is used for batch operation for storage table - service. It only supports one changeset. - ''' - - def __init__(self, service_instance, account_key, account_name, - protocol='http'): - _HTTPClient.__init__(self, service_instance, account_name=account_name, - account_key=account_key, protocol=protocol) - self.is_batch = False - self.batch_requests = [] - self.batch_table = '' - self.batch_partition_key = '' - self.batch_row_keys = [] - - def get_request_table(self, request): - ''' - Extracts table name from request.uri. The request.uri has either - "/mytable(...)" or "/mytable" format. - - request: the request to insert, update or delete entity - ''' - if '(' in request.path: - pos = request.path.find('(') - return request.path[1:pos] - else: - return request.path[1:] - - def get_request_partition_key(self, request): - ''' - Extracts PartitionKey from request.body if it is a POST request or from - request.path if it is not a POST request. Only insert operation request - is a POST request and the PartitionKey is in the request body. - - request: the request to insert, update or delete entity - ''' - if request.method == 'POST': - doc = minidom.parseString(request.body) - part_key = _get_children_from_path( - doc, 'entry', 'content', (METADATA_NS, 'properties'), - (_DATASERVICES_NS, 'PartitionKey')) - if not part_key: - raise WindowsAzureError(_ERROR_CANNOT_FIND_PARTITION_KEY) - return part_key[0].firstChild.nodeValue - else: - uri = url_unquote(request.path) - pos1 = uri.find('PartitionKey=\'') - pos2 = uri.find('\',', pos1) - if pos1 == -1 or pos2 == -1: - raise WindowsAzureError(_ERROR_CANNOT_FIND_PARTITION_KEY) - return uri[pos1 + len('PartitionKey=\''):pos2] - - def get_request_row_key(self, request): - ''' - Extracts RowKey from request.body if it is a POST request or from - request.path if it is not a POST request. Only insert operation request - is a POST request and the Rowkey is in the request body. 
- - request: the request to insert, update or delete entity - ''' - if request.method == 'POST': - doc = minidom.parseString(request.body) - row_key = _get_children_from_path( - doc, 'entry', 'content', (METADATA_NS, 'properties'), - (_DATASERVICES_NS, 'RowKey')) - if not row_key: - raise WindowsAzureError(_ERROR_CANNOT_FIND_ROW_KEY) - return row_key[0].firstChild.nodeValue - else: - uri = url_unquote(request.path) - pos1 = uri.find('RowKey=\'') - pos2 = uri.find('\')', pos1) - if pos1 == -1 or pos2 == -1: - raise WindowsAzureError(_ERROR_CANNOT_FIND_ROW_KEY) - row_key = uri[pos1 + len('RowKey=\''):pos2] - return row_key - - def validate_request_table(self, request): - ''' - Validates that all requests have the same table name. Set the table - name if it is the first request for the batch operation. - - request: the request to insert, update or delete entity - ''' - if self.batch_table: - if self.get_request_table(request) != self.batch_table: - raise WindowsAzureError(_ERROR_INCORRECT_TABLE_IN_BATCH) - else: - self.batch_table = self.get_request_table(request) - - def validate_request_partition_key(self, request): - ''' - Validates that all requests have the same PartitiionKey. Set the - PartitionKey if it is the first request for the batch operation. - - request: the request to insert, update or delete entity - ''' - if self.batch_partition_key: - if self.get_request_partition_key(request) != \ - self.batch_partition_key: - raise WindowsAzureError(_ERROR_INCORRECT_PARTITION_KEY_IN_BATCH) - else: - self.batch_partition_key = self.get_request_partition_key(request) - - def validate_request_row_key(self, request): - ''' - Validates that all requests have the different RowKey and adds RowKey - to existing RowKey list. - - request: the request to insert, update or delete entity - ''' - if self.batch_row_keys: - if self.get_request_row_key(request) in self.batch_row_keys: - raise WindowsAzureError(_ERROR_DUPLICATE_ROW_KEY_IN_BATCH) - else: - self.batch_row_keys.append(self.get_request_row_key(request)) - - def begin_batch(self): - ''' - Starts the batch operation. Intializes the batch variables - - is_batch: batch operation flag. - batch_table: the table name of the batch operation - batch_partition_key: the PartitionKey of the batch requests. - batch_row_keys: the RowKey list of adding requests. - batch_requests: the list of the requests. - ''' - self.is_batch = True - self.batch_table = '' - self.batch_partition_key = '' - self.batch_row_keys = [] - self.batch_requests = [] - - def insert_request_to_batch(self, request): - ''' - Adds request to batch operation. - - request: the request to insert, update or delete entity - ''' - self.validate_request_table(request) - self.validate_request_partition_key(request) - self.validate_request_row_key(request) - self.batch_requests.append(request) - - def commit_batch(self): - ''' Resets batch flag and commits the batch requests. ''' - if self.is_batch: - self.is_batch = False - self.commit_batch_requests() - - def commit_batch_requests(self): - ''' Commits the batch requests. ''' - - batch_boundary = b'batch_' + _new_boundary() - changeset_boundary = b'changeset_' + _new_boundary() - - # Commits batch only the requests list is not empty. 
- if self.batch_requests: - request = HTTPRequest() - request.method = 'POST' - request.host = self.batch_requests[0].host - request.path = '/$batch' - request.headers = [ - ('Content-Type', 'multipart/mixed; boundary=' + \ - batch_boundary.decode('utf-8')), - ('Accept', 'application/atom+xml,application/xml'), - ('Accept-Charset', 'UTF-8')] - - request.body = b'--' + batch_boundary + b'\n' - request.body += b'Content-Type: multipart/mixed; boundary=' - request.body += changeset_boundary + b'\n\n' - - content_id = 1 - - # Adds each request body to the POST data. - for batch_request in self.batch_requests: - request.body += b'--' + changeset_boundary + b'\n' - request.body += b'Content-Type: application/http\n' - request.body += b'Content-Transfer-Encoding: binary\n\n' - request.body += batch_request.method.encode('utf-8') - request.body += b' http://' - request.body += batch_request.host.encode('utf-8') - request.body += batch_request.path.encode('utf-8') - request.body += b' HTTP/1.1\n' - request.body += b'Content-ID: ' - request.body += str(content_id).encode('utf-8') + b'\n' - content_id += 1 - - # Add different headers for different type requests. - if not batch_request.method == 'DELETE': - request.body += \ - b'Content-Type: application/atom+xml;type=entry\n' - for name, value in batch_request.headers: - if name == 'If-Match': - request.body += name.encode('utf-8') + b': ' - request.body += value.encode('utf-8') + b'\n' - break - request.body += b'Content-Length: ' - request.body += str(len(batch_request.body)).encode('utf-8') - request.body += b'\n\n' - request.body += batch_request.body + b'\n' - else: - for name, value in batch_request.headers: - # If-Match should be already included in - # batch_request.headers, but in case it is missing, - # just add it. - if name == 'If-Match': - request.body += name.encode('utf-8') + b': ' - request.body += value.encode('utf-8') + b'\n\n' - break - else: - request.body += b'If-Match: *\n\n' - - request.body += b'--' + changeset_boundary + b'--' + b'\n' - request.body += b'--' + batch_boundary + b'--' - - request.path, request.query = _update_request_uri_query(request) - request.headers = _update_storage_table_header(request) - auth = _sign_storage_table_request(request, - self.account_name, - self.account_key) - request.headers.append(('Authorization', auth)) - - # Submit the whole request as batch request. - response = self.perform_request(request) - if response.status >= 300: - raise HTTPError(response.status, - _ERROR_BATCH_COMMIT_FAIL, - self.respheader, - response.body) - - # http://www.odata.org/documentation/odata-version-2-0/batch-processing/ - # The body of a ChangeSet response is either a response for all the - # successfully processed change request within the ChangeSet, - # formatted exactly as it would have appeared outside of a batch, - # or a single response indicating a failure of the entire ChangeSet. - responses = self._parse_batch_response(response.body) - if responses and responses[0].status >= 300: - self._report_batch_error(responses[0]) - - def cancel_batch(self): - ''' Resets the batch flag. 
''' - self.is_batch = False - - def _parse_batch_response(self, body): - parts = body.split(b'--changesetresponse_') - - responses = [] - for part in parts: - httpLocation = part.find(b'HTTP/') - if httpLocation > 0: - response = self._parse_batch_response_part(part[httpLocation:]) - responses.append(response) - - return responses - - def _parse_batch_response_part(self, part): - lines = part.splitlines(); - - # First line is the HTTP status/reason - status, _, reason = lines[0].partition(b' ')[2].partition(b' ') - - # Followed by headers and body - headers = [] - body = b'' - isBody = False - for line in lines[1:]: - if line == b'' and not isBody: - isBody = True - elif isBody: - body += line - else: - headerName, _, headerVal = line.partition(b':') - headers.append((headerName.lower(), headerVal)) - - return HTTPResponse(int(status), reason.strip(), headers, body) - - def _report_batch_error(self, response): - xml = response.body.decode('utf-8') - doc = minidom.parseString(xml) - - n = _get_children_from_path(doc, (METADATA_NS, 'error'), 'code') - code = n[0].firstChild.nodeValue if n and n[0].firstChild else '' - - n = _get_children_from_path(doc, (METADATA_NS, 'error'), 'message') - message = n[0].firstChild.nodeValue if n and n[0].firstChild else xml - - raise WindowsAzureBatchOperationError(message, code) +#------------------------------------------------------------------------- +# Copyright (c) Microsoft. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#-------------------------------------------------------------------------- +import sys +import uuid + +from azure import ( + _update_request_uri_query, + WindowsAzureError, + WindowsAzureBatchOperationError, + _get_children_from_path, + url_unquote, + _ERROR_CANNOT_FIND_PARTITION_KEY, + _ERROR_CANNOT_FIND_ROW_KEY, + _ERROR_INCORRECT_TABLE_IN_BATCH, + _ERROR_INCORRECT_PARTITION_KEY_IN_BATCH, + _ERROR_DUPLICATE_ROW_KEY_IN_BATCH, + _ERROR_BATCH_COMMIT_FAIL, + ) +from azure.http import HTTPError, HTTPRequest, HTTPResponse +from azure.http.httpclient import _HTTPClient +from azure.storage import ( + _update_storage_table_header, + METADATA_NS, + _sign_storage_table_request, + ) +from xml.dom import minidom + +_DATASERVICES_NS = 'http://schemas.microsoft.com/ado/2007/08/dataservices' + +if sys.version_info < (3,): + def _new_boundary(): + return str(uuid.uuid1()) +else: + def _new_boundary(): + return str(uuid.uuid1()).encode('utf-8') + + +class _BatchClient(_HTTPClient): + + ''' + This is the class that is used for batch operation for storage table + service. It only supports one changeset. + ''' + + def __init__(self, service_instance, account_key, account_name, + protocol='http'): + _HTTPClient.__init__(self, service_instance, account_name=account_name, + account_key=account_key, protocol=protocol) + self.is_batch = False + self.batch_requests = [] + self.batch_table = '' + self.batch_partition_key = '' + self.batch_row_keys = [] + + def get_request_table(self, request): + ''' + Extracts table name from request.uri. 
The request.uri has either
+        "/mytable(...)" or "/mytable" format.
+
+        request: the request to insert, update or delete entity
+        '''
+        if '(' in request.path:
+            pos = request.path.find('(')
+            return request.path[1:pos]
+        else:
+            return request.path[1:]
+
+    def get_request_partition_key(self, request):
+        '''
+        Extracts PartitionKey from request.body if it is a POST request or
+        from request.path if it is not. Only the insert operation is a POST
+        request; its PartitionKey is in the request body.
+
+        request: the request to insert, update or delete entity
+        '''
+        if request.method == 'POST':
+            doc = minidom.parseString(request.body)
+            part_key = _get_children_from_path(
+                doc, 'entry', 'content', (METADATA_NS, 'properties'),
+                (_DATASERVICES_NS, 'PartitionKey'))
+            if not part_key:
+                raise WindowsAzureError(_ERROR_CANNOT_FIND_PARTITION_KEY)
+            return part_key[0].firstChild.nodeValue
+        else:
+            uri = url_unquote(request.path)
+            pos1 = uri.find('PartitionKey=\'')
+            pos2 = uri.find('\',', pos1)
+            if pos1 == -1 or pos2 == -1:
+                raise WindowsAzureError(_ERROR_CANNOT_FIND_PARTITION_KEY)
+            return uri[pos1 + len('PartitionKey=\''):pos2]
+
+    def get_request_row_key(self, request):
+        '''
+        Extracts RowKey from request.body if it is a POST request or from
+        request.path if it is not. Only the insert operation is a POST
+        request; its RowKey is in the request body.
+
+        request: the request to insert, update or delete entity
+        '''
+        if request.method == 'POST':
+            doc = minidom.parseString(request.body)
+            row_key = _get_children_from_path(
+                doc, 'entry', 'content', (METADATA_NS, 'properties'),
+                (_DATASERVICES_NS, 'RowKey'))
+            if not row_key:
+                raise WindowsAzureError(_ERROR_CANNOT_FIND_ROW_KEY)
+            return row_key[0].firstChild.nodeValue
+        else:
+            uri = url_unquote(request.path)
+            pos1 = uri.find('RowKey=\'')
+            pos2 = uri.find('\')', pos1)
+            if pos1 == -1 or pos2 == -1:
+                raise WindowsAzureError(_ERROR_CANNOT_FIND_ROW_KEY)
+            row_key = uri[pos1 + len('RowKey=\''):pos2]
+            return row_key
+
+    def validate_request_table(self, request):
+        '''
+        Validates that all requests have the same table name. Sets the table
+        name if it is the first request of the batch operation.
+
+        request: the request to insert, update or delete entity
+        '''
+        if self.batch_table:
+            if self.get_request_table(request) != self.batch_table:
+                raise WindowsAzureError(_ERROR_INCORRECT_TABLE_IN_BATCH)
+        else:
+            self.batch_table = self.get_request_table(request)
+
+    def validate_request_partition_key(self, request):
+        '''
+        Validates that all requests have the same PartitionKey. Sets the
+        PartitionKey if it is the first request of the batch operation.
+
+        request: the request to insert, update or delete entity
+        '''
+        if self.batch_partition_key:
+            if self.get_request_partition_key(request) != \
+                    self.batch_partition_key:
+                raise WindowsAzureError(_ERROR_INCORRECT_PARTITION_KEY_IN_BATCH)
+        else:
+            self.batch_partition_key = self.get_request_partition_key(request)
+
+    def validate_request_row_key(self, request):
+        '''
+        Validates that all requests have different RowKeys and adds the
+        RowKey to the existing RowKey list.
+
+        request: the request to insert, update or delete entity
+        '''
+        # Check against all previously added keys, then record this one so
+        # that later duplicates in the same batch are caught as well.
+        if self.get_request_row_key(request) in self.batch_row_keys:
+            raise WindowsAzureError(_ERROR_DUPLICATE_ROW_KEY_IN_BATCH)
+        self.batch_row_keys.append(self.get_request_row_key(request))
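For non-POST operations the entity keys are recovered from the URI itself, e.g. /mytable(PartitionKey='pk1',RowKey='rk1'). A minimal standalone version of that parsing (not the SDK's method, which also handles the POST/body case):

    try:
        from urllib.parse import unquote as url_unquote   # Python 3
    except ImportError:
        from urllib import unquote as url_unquote         # Python 2

    def keys_from_entity_uri(path):
        # Mirrors the non-POST branch: scan for PartitionKey='...' and RowKey='...'.
        uri = url_unquote(path)
        pk_start = uri.find("PartitionKey='")
        pk_end = uri.find("',", pk_start)
        rk_start = uri.find("RowKey='")
        rk_end = uri.find("')", rk_start)
        if -1 in (pk_start, pk_end, rk_start, rk_end):
            raise ValueError('cannot find entity keys in ' + path)
        return (uri[pk_start + len("PartitionKey='"):pk_end],
                uri[rk_start + len("RowKey='"):rk_end])

    print(keys_from_entity_uri("/mytable(PartitionKey='pk1',RowKey='rk1')"))
    # ('pk1', 'rk1')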
+
+    def begin_batch(self):
+        '''
+        Starts the batch operation. Initializes the batch variables:
+
+        is_batch: batch operation flag.
+        batch_table: the table name of the batch operation.
+        batch_partition_key: the PartitionKey of the batch requests.
+        batch_row_keys: the RowKey list of the added requests.
+        batch_requests: the list of the requests.
+        '''
+        self.is_batch = True
+        self.batch_table = ''
+        self.batch_partition_key = ''
+        self.batch_row_keys = []
+        self.batch_requests = []
+
+    def insert_request_to_batch(self, request):
+        '''
+        Adds the request to the batch operation.
+
+        request: the request to insert, update or delete entity
+        '''
+        self.validate_request_table(request)
+        self.validate_request_partition_key(request)
+        self.validate_request_row_key(request)
+        self.batch_requests.append(request)
+
+    def commit_batch(self):
+        ''' Resets the batch flag and commits the batch requests. '''
+        if self.is_batch:
+            self.is_batch = False
+            self.commit_batch_requests()
+
+    def commit_batch_requests(self):
+        ''' Commits the batch requests. '''
+
+        batch_boundary = b'batch_' + _new_boundary()
+        changeset_boundary = b'changeset_' + _new_boundary()
+
+        # Commits the batch only if the requests list is not empty.
+        if self.batch_requests:
+            request = HTTPRequest()
+            request.method = 'POST'
+            request.host = self.batch_requests[0].host
+            request.path = '/$batch'
+            request.headers = [
+                ('Content-Type', 'multipart/mixed; boundary=' + \
+                    batch_boundary.decode('utf-8')),
+                ('Accept', 'application/atom+xml,application/xml'),
+                ('Accept-Charset', 'UTF-8')]
+
+            request.body = b'--' + batch_boundary + b'\n'
+            request.body += b'Content-Type: multipart/mixed; boundary='
+            request.body += changeset_boundary + b'\n\n'
+
+            content_id = 1
+
+            # Adds each request body to the POST data.
+            for batch_request in self.batch_requests:
+                request.body += b'--' + changeset_boundary + b'\n'
+                request.body += b'Content-Type: application/http\n'
+                request.body += b'Content-Transfer-Encoding: binary\n\n'
+                request.body += batch_request.method.encode('utf-8')
+                request.body += b' http://'
+                request.body += batch_request.host.encode('utf-8')
+                request.body += batch_request.path.encode('utf-8')
+                request.body += b' HTTP/1.1\n'
+                request.body += b'Content-ID: '
+                request.body += str(content_id).encode('utf-8') + b'\n'
+                content_id += 1
+
+                # Add different headers for different request types.
+                if not batch_request.method == 'DELETE':
+                    request.body += \
+                        b'Content-Type: application/atom+xml;type=entry\n'
+                    for name, value in batch_request.headers:
+                        if name == 'If-Match':
+                            request.body += name.encode('utf-8') + b': '
+                            request.body += value.encode('utf-8') + b'\n'
+                            break
+                    request.body += b'Content-Length: '
+                    request.body += str(len(batch_request.body)).encode('utf-8')
+                    request.body += b'\n\n'
+                    request.body += batch_request.body + b'\n'
+                else:
+                    for name, value in batch_request.headers:
+                        # If-Match should already be included in
+                        # batch_request.headers, but in case it is missing,
+                        # just add it.
+                        if name == 'If-Match':
+                            request.body += name.encode('utf-8') + b': '
+                            request.body += value.encode('utf-8') + b'\n\n'
+                            break
+                    else:
+                        request.body += b'If-Match: *\n\n'
+
+            request.body += b'--' + changeset_boundary + b'--' + b'\n'
+            request.body += b'--' + batch_boundary + b'--'
+
+            request.path, request.query = _update_request_uri_query(request)
+            request.headers = _update_storage_table_header(request)
+            auth = _sign_storage_table_request(request,
+                                               self.account_name,
+                                               self.account_key)
+            request.headers.append(('Authorization', auth))
+
+            # Submit the whole request as a batch request.
+ response = self.perform_request(request) + if response.status >= 300: + raise HTTPError(response.status, + _ERROR_BATCH_COMMIT_FAIL, + self.respheader, + response.body) + + # http://www.odata.org/documentation/odata-version-2-0/batch-processing/ + # The body of a ChangeSet response is either a response for all the + # successfully processed change request within the ChangeSet, + # formatted exactly as it would have appeared outside of a batch, + # or a single response indicating a failure of the entire ChangeSet. + responses = self._parse_batch_response(response.body) + if responses and responses[0].status >= 300: + self._report_batch_error(responses[0]) + + def cancel_batch(self): + ''' Resets the batch flag. ''' + self.is_batch = False + + def _parse_batch_response(self, body): + parts = body.split(b'--changesetresponse_') + + responses = [] + for part in parts: + httpLocation = part.find(b'HTTP/') + if httpLocation > 0: + response = self._parse_batch_response_part(part[httpLocation:]) + responses.append(response) + + return responses + + def _parse_batch_response_part(self, part): + lines = part.splitlines(); + + # First line is the HTTP status/reason + status, _, reason = lines[0].partition(b' ')[2].partition(b' ') + + # Followed by headers and body + headers = [] + body = b'' + isBody = False + for line in lines[1:]: + if line == b'' and not isBody: + isBody = True + elif isBody: + body += line + else: + headerName, _, headerVal = line.partition(b':') + headers.append((headerName.lower(), headerVal)) + + return HTTPResponse(int(status), reason.strip(), headers, body) + + def _report_batch_error(self, response): + xml = response.body.decode('utf-8') + doc = minidom.parseString(xml) + + n = _get_children_from_path(doc, (METADATA_NS, 'error'), 'code') + code = n[0].firstChild.nodeValue if n and n[0].firstChild else '' + + n = _get_children_from_path(doc, (METADATA_NS, 'error'), 'message') + message = n[0].firstChild.nodeValue if n and n[0].firstChild else xml + + raise WindowsAzureBatchOperationError(message, code) diff --git a/awx/lib/site-packages/azure/http/httpclient.py b/awx/lib/site-packages/azure/http/httpclient.py index 662d60e92c..1bdeb73102 100644 --- a/awx/lib/site-packages/azure/http/httpclient.py +++ b/awx/lib/site-packages/azure/http/httpclient.py @@ -1,223 +1,251 @@ -#------------------------------------------------------------------------- -# Copyright (c) Microsoft. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#-------------------------------------------------------------------------- -import base64 -import os -import sys - -if sys.version_info < (3,): - from httplib import ( - HTTPSConnection, - HTTPConnection, - HTTP_PORT, - HTTPS_PORT, - ) - from urlparse import urlparse -else: - from http.client import ( - HTTPSConnection, - HTTPConnection, - HTTP_PORT, - HTTPS_PORT, - ) - from urllib.parse import urlparse - -from azure.http import HTTPError, HTTPResponse -from azure import _USER_AGENT_STRING, _update_request_uri_query - - -class _HTTPClient(object): - - ''' - Takes the request and sends it to cloud service and returns the response. - ''' - - def __init__(self, service_instance, cert_file=None, account_name=None, - account_key=None, service_namespace=None, issuer=None, - protocol='https'): - ''' - service_instance: service client instance. - cert_file: - certificate file name/location. This is only used in hosted - service management. - account_name: the storage account. - account_key: - the storage account access key for storage services or servicebus - access key for service bus service. - service_namespace: the service namespace for service bus. - issuer: the issuer for service bus service. - ''' - self.service_instance = service_instance - self.status = None - self.respheader = None - self.message = None - self.cert_file = cert_file - self.account_name = account_name - self.account_key = account_key - self.service_namespace = service_namespace - self.issuer = issuer - self.protocol = protocol - self.proxy_host = None - self.proxy_port = None - self.proxy_user = None - self.proxy_password = None - self.use_httplib = self.should_use_httplib() - - def should_use_httplib(self): - if sys.platform.lower().startswith('win') and self.cert_file: - # On Windows, auto-detect between Windows Store Certificate - # (winhttp) and OpenSSL .pem certificate file (httplib). - # - # We used to only support certificates installed in the Windows - # Certificate Store. - # cert_file example: CURRENT_USER\my\CertificateName - # - # We now support using an OpenSSL .pem certificate file, - # for a consistent experience across all platforms. - # cert_file example: account\certificate.pem - # - # When using OpenSSL .pem certificate file on Windows, make sure - # you are on CPython 2.7.4 or later. - - # If it's not an existing file on disk, then treat it as a path in - # the Windows Certificate Store, which means we can't use httplib. - if not os.path.isfile(self.cert_file): - return False - - return True - - def set_proxy(self, host, port, user, password): - ''' - Sets the proxy server host and port for the HTTP CONNECT Tunnelling. - - host: Address of the proxy. Ex: '192.168.0.100' - port: Port of the proxy. Ex: 6000 - user: User for proxy authorization. - password: Password for proxy authorization. - ''' - self.proxy_host = host - self.proxy_port = port - self.proxy_user = user - self.proxy_password = password - - def get_connection(self, request): - ''' Create connection for the request. 
''' - protocol = request.protocol_override \ - if request.protocol_override else self.protocol - target_host = request.host - target_port = HTTP_PORT if protocol == 'http' else HTTPS_PORT - - if not self.use_httplib: - import azure.http.winhttp - connection = azure.http.winhttp._HTTPConnection( - target_host, cert_file=self.cert_file, protocol=protocol) - proxy_host = self.proxy_host - proxy_port = self.proxy_port - else: - if ':' in target_host: - target_host, _, target_port = target_host.rpartition(':') - if self.proxy_host: - proxy_host = target_host - proxy_port = target_port - host = self.proxy_host - port = self.proxy_port - else: - host = target_host - port = target_port - - if protocol == 'http': - connection = HTTPConnection(host, int(port)) - else: - connection = HTTPSConnection( - host, int(port), cert_file=self.cert_file) - - if self.proxy_host: - headers = None - if self.proxy_user and self.proxy_password: - auth = base64.encodestring( - "{0}:{1}".format(self.proxy_user, self.proxy_password)) - headers = {'Proxy-Authorization': 'Basic {0}'.format(auth)} - connection.set_tunnel(proxy_host, int(proxy_port), headers) - - return connection - - def send_request_headers(self, connection, request_headers): - if self.use_httplib: - if self.proxy_host: - for i in connection._buffer: - if i.startswith("Host: "): - connection._buffer.remove(i) - connection.putheader( - 'Host', "{0}:{1}".format(connection._tunnel_host, - connection._tunnel_port)) - - for name, value in request_headers: - if value: - connection.putheader(name, value) - - connection.putheader('User-Agent', _USER_AGENT_STRING) - connection.endheaders() - - def send_request_body(self, connection, request_body): - if request_body: - assert isinstance(request_body, bytes) - connection.send(request_body) - elif (not isinstance(connection, HTTPSConnection) and - not isinstance(connection, HTTPConnection)): - connection.send(None) - - def perform_request(self, request): - ''' Sends request to cloud service server and return the response. ''' - connection = self.get_connection(request) - try: - connection.putrequest(request.method, request.path) - - if not self.use_httplib: - if self.proxy_host and self.proxy_user: - connection.set_proxy_credentials( - self.proxy_user, self.proxy_password) - - self.send_request_headers(connection, request.headers) - self.send_request_body(connection, request.body) - - resp = connection.getresponse() - self.status = int(resp.status) - self.message = resp.reason - self.respheader = headers = resp.getheaders() - - # for consistency across platforms, make header names lowercase - for i, value in enumerate(headers): - headers[i] = (value[0].lower(), value[1]) - - respbody = None - if resp.length is None: - respbody = resp.read() - elif resp.length > 0: - respbody = resp.read(resp.length) - - response = HTTPResponse( - int(resp.status), resp.reason, headers, respbody) - if self.status == 307: - new_url = urlparse(dict(headers)['location']) - request.host = new_url.hostname - request.path = new_url.path - request.path, request.query = _update_request_uri_query(request) - return self.perform_request(request) - if self.status >= 300: - raise HTTPError(self.status, self.message, - self.respheader, respbody) - - return response - finally: - connection.close() +#------------------------------------------------------------------------- +# Copyright (c) Microsoft. All rights reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#-------------------------------------------------------------------------- +import base64 +import os +import sys + +if sys.version_info < (3,): + from httplib import ( + HTTPSConnection, + HTTPConnection, + HTTP_PORT, + HTTPS_PORT, + ) + from urlparse import urlparse +else: + from http.client import ( + HTTPSConnection, + HTTPConnection, + HTTP_PORT, + HTTPS_PORT, + ) + from urllib.parse import urlparse + +from azure.http import HTTPError, HTTPResponse +from azure import _USER_AGENT_STRING, _update_request_uri_query + +DEBUG_REQUESTS = False +DEBUG_RESPONSES = False + +class _HTTPClient(object): + + ''' + Takes the request and sends it to cloud service and returns the response. + ''' + + def __init__(self, service_instance, cert_file=None, account_name=None, + account_key=None, protocol='https', request_session=None): + ''' + service_instance: service client instance. + cert_file: + certificate file name/location. This is only used in hosted + service management. + account_name: the storage account. + account_key: + the storage account access key. + request_session: + session object created with requests library (or compatible). + ''' + self.service_instance = service_instance + self.status = None + self.respheader = None + self.message = None + self.cert_file = cert_file + self.account_name = account_name + self.account_key = account_key + self.protocol = protocol + self.proxy_host = None + self.proxy_port = None + self.proxy_user = None + self.proxy_password = None + self.request_session = request_session + if request_session: + self.use_httplib = True + else: + self.use_httplib = self.should_use_httplib() + + def should_use_httplib(self): + if sys.platform.lower().startswith('win') and self.cert_file: + # On Windows, auto-detect between Windows Store Certificate + # (winhttp) and OpenSSL .pem certificate file (httplib). + # + # We used to only support certificates installed in the Windows + # Certificate Store. + # cert_file example: CURRENT_USER\my\CertificateName + # + # We now support using an OpenSSL .pem certificate file, + # for a consistent experience across all platforms. + # cert_file example: account\certificate.pem + # + # When using OpenSSL .pem certificate file on Windows, make sure + # you are on CPython 2.7.4 or later. + + # If it's not an existing file on disk, then treat it as a path in + # the Windows Certificate Store, which means we can't use httplib. + if not os.path.isfile(self.cert_file): + return False + + return True + + def set_proxy(self, host, port, user, password): + ''' + Sets the proxy server host and port for the HTTP CONNECT Tunnelling. + + host: Address of the proxy. Ex: '192.168.0.100' + port: Port of the proxy. Ex: 6000 + user: User for proxy authorization. + password: Password for proxy authorization. 
+ ''' + self.proxy_host = host + self.proxy_port = port + self.proxy_user = user + self.proxy_password = password + + def get_uri(self, request): + ''' Return the target uri for the request.''' + protocol = request.protocol_override \ + if request.protocol_override else self.protocol + port = HTTP_PORT if protocol == 'http' else HTTPS_PORT + return protocol + '://' + request.host + ':' + str(port) + request.path + + def get_connection(self, request): + ''' Create connection for the request. ''' + protocol = request.protocol_override \ + if request.protocol_override else self.protocol + target_host = request.host + target_port = HTTP_PORT if protocol == 'http' else HTTPS_PORT + + if self.request_session: + import azure.http.requestsclient + connection = azure.http.requestsclient._RequestsConnection( + target_host, protocol, self.request_session) + #TODO: proxy stuff + elif not self.use_httplib: + import azure.http.winhttp + connection = azure.http.winhttp._HTTPConnection( + target_host, cert_file=self.cert_file, protocol=protocol) + proxy_host = self.proxy_host + proxy_port = self.proxy_port + else: + if ':' in target_host: + target_host, _, target_port = target_host.rpartition(':') + if self.proxy_host: + proxy_host = target_host + proxy_port = target_port + host = self.proxy_host + port = self.proxy_port + else: + host = target_host + port = target_port + + if protocol == 'http': + connection = HTTPConnection(host, int(port)) + else: + connection = HTTPSConnection( + host, int(port), cert_file=self.cert_file) + + if self.proxy_host: + headers = None + if self.proxy_user and self.proxy_password: + auth = base64.encodestring( + "{0}:{1}".format(self.proxy_user, self.proxy_password)) + headers = {'Proxy-Authorization': 'Basic {0}'.format(auth)} + connection.set_tunnel(proxy_host, int(proxy_port), headers) + + return connection + + def send_request_headers(self, connection, request_headers): + if self.use_httplib: + if self.proxy_host: + for i in connection._buffer: + if i.startswith("Host: "): + connection._buffer.remove(i) + connection.putheader( + 'Host', "{0}:{1}".format(connection._tunnel_host, + connection._tunnel_port)) + + for name, value in request_headers: + if value: + connection.putheader(name, value) + + connection.putheader('User-Agent', _USER_AGENT_STRING) + connection.endheaders() + + def send_request_body(self, connection, request_body): + if request_body: + assert isinstance(request_body, bytes) + connection.send(request_body) + elif (not isinstance(connection, HTTPSConnection) and + not isinstance(connection, HTTPConnection)): + connection.send(None) + + def perform_request(self, request): + ''' Sends request to cloud service server and return the response. 
''' + connection = self.get_connection(request) + try: + connection.putrequest(request.method, request.path) + + if not self.use_httplib: + if self.proxy_host and self.proxy_user: + connection.set_proxy_credentials( + self.proxy_user, self.proxy_password) + + self.send_request_headers(connection, request.headers) + self.send_request_body(connection, request.body) + + if DEBUG_REQUESTS and request.body: + print('request:') + try: + print(request.body) + except: + pass + + resp = connection.getresponse() + self.status = int(resp.status) + self.message = resp.reason + self.respheader = headers = resp.getheaders() + + # for consistency across platforms, make header names lowercase + for i, value in enumerate(headers): + headers[i] = (value[0].lower(), value[1]) + + respbody = None + if resp.length is None: + respbody = resp.read() + elif resp.length > 0: + respbody = resp.read(resp.length) + + if DEBUG_RESPONSES and respbody: + print('response:') + try: + print(respbody) + except: + pass + + response = HTTPResponse( + int(resp.status), resp.reason, headers, respbody) + if self.status == 307: + new_url = urlparse(dict(headers)['location']) + request.host = new_url.hostname + request.path = new_url.path + request.path, request.query = _update_request_uri_query(request) + return self.perform_request(request) + if self.status >= 300: + raise HTTPError(self.status, self.message, + self.respheader, respbody) + + return response + finally: + connection.close() diff --git a/awx/lib/site-packages/azure/http/requestsclient.py b/awx/lib/site-packages/azure/http/requestsclient.py new file mode 100644 index 0000000000..02b524214c --- /dev/null +++ b/awx/lib/site-packages/azure/http/requestsclient.py @@ -0,0 +1,74 @@ +#------------------------------------------------------------------------- +# Copyright (c) Microsoft. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#-------------------------------------------------------------------------- + +class _Response(object): + + ''' Response class corresponding to the response returned from httplib + HTTPConnection. ''' + + def __init__(self, response): + self.status = response.status_code + self.reason = response.reason + self.respbody = response.content + self.length = len(response.content) + self.headers = [] + for key, name in response.headers.items(): + self.headers.append((key.lower(), name)) + + def getheaders(self): + '''Returns response headers.''' + return self.headers + + def read(self, _length): + '''Returns response body. 
''' + return self.respbody[:_length] + + +class _RequestsConnection(object): + + def __init__(self, host, protocol, session): + self.host = host + self.protocol = protocol + self.session = session + self.headers = {} + self.method = None + self.body = None + self.response = None + self.uri = None + + def close(self): + pass + + def set_tunnel(self, host, port=None, headers=None): + pass + + def set_proxy_credentials(self, user, password): + pass + + def putrequest(self, method, uri): + self.method = method + self.uri = self.protocol + '://' + self.host + uri + + def putheader(self, name, value): + self.headers[name] = value + + def endheaders(self): + pass + + def send(self, request_body): + self.response = self.session.request(self.method, self.uri, data=request_body, headers=self.headers) + + def getresponse(self): + return _Response(self.response) diff --git a/awx/lib/site-packages/azure/http/winhttp.py b/awx/lib/site-packages/azure/http/winhttp.py index 86790333fc..055bf778e0 100644 --- a/awx/lib/site-packages/azure/http/winhttp.py +++ b/awx/lib/site-packages/azure/http/winhttp.py @@ -1,471 +1,471 @@ -#------------------------------------------------------------------------- -# Copyright (c) Microsoft. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#-------------------------------------------------------------------------- -from ctypes import ( - c_void_p, - c_long, - c_ulong, - c_longlong, - c_ulonglong, - c_short, - c_ushort, - c_wchar_p, - c_byte, - byref, - Structure, - Union, - POINTER, - WINFUNCTYPE, - HRESULT, - oledll, - WinDLL, - ) -import ctypes -import sys - -if sys.version_info >= (3,): - def unicode(text): - return text - -#------------------------------------------------------------------------------ -# Constants that are used in COM operations -VT_EMPTY = 0 -VT_NULL = 1 -VT_I2 = 2 -VT_I4 = 3 -VT_BSTR = 8 -VT_BOOL = 11 -VT_I1 = 16 -VT_UI1 = 17 -VT_UI2 = 18 -VT_UI4 = 19 -VT_I8 = 20 -VT_UI8 = 21 -VT_ARRAY = 8192 - -HTTPREQUEST_PROXYSETTING_PROXY = 2 -HTTPREQUEST_SETCREDENTIALS_FOR_PROXY = 1 - -HTTPREQUEST_PROXY_SETTING = c_long -HTTPREQUEST_SETCREDENTIALS_FLAGS = c_long -#------------------------------------------------------------------------------ -# Com related APIs that are used. 
-_ole32 = oledll.ole32 -_oleaut32 = WinDLL('oleaut32') -_CLSIDFromString = _ole32.CLSIDFromString -_CoInitialize = _ole32.CoInitialize -_CoInitialize.argtypes = [c_void_p] - -_CoCreateInstance = _ole32.CoCreateInstance - -_SysAllocString = _oleaut32.SysAllocString -_SysAllocString.restype = c_void_p -_SysAllocString.argtypes = [c_wchar_p] - -_SysFreeString = _oleaut32.SysFreeString -_SysFreeString.argtypes = [c_void_p] - -# SAFEARRAY* -# SafeArrayCreateVector(_In_ VARTYPE vt,_In_ LONG lLbound,_In_ ULONG -# cElements); -_SafeArrayCreateVector = _oleaut32.SafeArrayCreateVector -_SafeArrayCreateVector.restype = c_void_p -_SafeArrayCreateVector.argtypes = [c_ushort, c_long, c_ulong] - -# HRESULT -# SafeArrayAccessData(_In_ SAFEARRAY *psa, _Out_ void **ppvData); -_SafeArrayAccessData = _oleaut32.SafeArrayAccessData -_SafeArrayAccessData.argtypes = [c_void_p, POINTER(c_void_p)] - -# HRESULT -# SafeArrayUnaccessData(_In_ SAFEARRAY *psa); -_SafeArrayUnaccessData = _oleaut32.SafeArrayUnaccessData -_SafeArrayUnaccessData.argtypes = [c_void_p] - -# HRESULT -# SafeArrayGetUBound(_In_ SAFEARRAY *psa, _In_ UINT nDim, _Out_ LONG -# *plUbound); -_SafeArrayGetUBound = _oleaut32.SafeArrayGetUBound -_SafeArrayGetUBound.argtypes = [c_void_p, c_ulong, POINTER(c_long)] - - -#------------------------------------------------------------------------------ - -class BSTR(c_wchar_p): - - ''' BSTR class in python. ''' - - def __init__(self, value): - super(BSTR, self).__init__(_SysAllocString(value)) - - def __del__(self): - _SysFreeString(self) - - -class VARIANT(Structure): - - ''' - VARIANT structure in python. Does not match the definition in - MSDN exactly & it is only mapping the used fields. Field names are also - slighty different. - ''' - - class _tagData(Union): - - class _tagRecord(Structure): - _fields_ = [('pvoid', c_void_p), ('precord', c_void_p)] - - _fields_ = [('llval', c_longlong), - ('ullval', c_ulonglong), - ('lval', c_long), - ('ulval', c_ulong), - ('ival', c_short), - ('boolval', c_ushort), - ('bstrval', BSTR), - ('parray', c_void_p), - ('record', _tagRecord)] - - _fields_ = [('vt', c_ushort), - ('wReserved1', c_ushort), - ('wReserved2', c_ushort), - ('wReserved3', c_ushort), - ('vdata', _tagData)] - - @staticmethod - def create_empty(): - variant = VARIANT() - variant.vt = VT_EMPTY - variant.vdata.llval = 0 - return variant - - @staticmethod - def create_safearray_from_str(text): - variant = VARIANT() - variant.vt = VT_ARRAY | VT_UI1 - - length = len(text) - variant.vdata.parray = _SafeArrayCreateVector(VT_UI1, 0, length) - pvdata = c_void_p() - _SafeArrayAccessData(variant.vdata.parray, byref(pvdata)) - ctypes.memmove(pvdata, text, length) - _SafeArrayUnaccessData(variant.vdata.parray) - - return variant - - @staticmethod - def create_bstr_from_str(text): - variant = VARIANT() - variant.vt = VT_BSTR - variant.vdata.bstrval = BSTR(text) - return variant - - @staticmethod - def create_bool_false(): - variant = VARIANT() - variant.vt = VT_BOOL - variant.vdata.boolval = 0 - return variant - - def is_safearray_of_bytes(self): - return self.vt == VT_ARRAY | VT_UI1 - - def str_from_safearray(self): - assert self.vt == VT_ARRAY | VT_UI1 - pvdata = c_void_p() - count = c_long() - _SafeArrayGetUBound(self.vdata.parray, 1, byref(count)) - count = c_long(count.value + 1) - _SafeArrayAccessData(self.vdata.parray, byref(pvdata)) - text = ctypes.string_at(pvdata, count) - _SafeArrayUnaccessData(self.vdata.parray) - return text - - def __del__(self): - _VariantClear(self) - -# HRESULT VariantClear(_Inout_ 
VARIANTARG *pvarg); -_VariantClear = _oleaut32.VariantClear -_VariantClear.argtypes = [POINTER(VARIANT)] - - -class GUID(Structure): - - ''' GUID structure in python. ''' - - _fields_ = [("data1", c_ulong), - ("data2", c_ushort), - ("data3", c_ushort), - ("data4", c_byte * 8)] - - def __init__(self, name=None): - if name is not None: - _CLSIDFromString(unicode(name), byref(self)) - - -class _WinHttpRequest(c_void_p): - - ''' - Maps the Com API to Python class functions. Not all methods in - IWinHttpWebRequest are mapped - only the methods we use. - ''' - _AddRef = WINFUNCTYPE(c_long) \ - (1, 'AddRef') - _Release = WINFUNCTYPE(c_long) \ - (2, 'Release') - _SetProxy = WINFUNCTYPE(HRESULT, - HTTPREQUEST_PROXY_SETTING, - VARIANT, - VARIANT) \ - (7, 'SetProxy') - _SetCredentials = WINFUNCTYPE(HRESULT, - BSTR, - BSTR, - HTTPREQUEST_SETCREDENTIALS_FLAGS) \ - (8, 'SetCredentials') - _Open = WINFUNCTYPE(HRESULT, BSTR, BSTR, VARIANT) \ - (9, 'Open') - _SetRequestHeader = WINFUNCTYPE(HRESULT, BSTR, BSTR) \ - (10, 'SetRequestHeader') - _GetResponseHeader = WINFUNCTYPE(HRESULT, BSTR, POINTER(c_void_p)) \ - (11, 'GetResponseHeader') - _GetAllResponseHeaders = WINFUNCTYPE(HRESULT, POINTER(c_void_p)) \ - (12, 'GetAllResponseHeaders') - _Send = WINFUNCTYPE(HRESULT, VARIANT) \ - (13, 'Send') - _Status = WINFUNCTYPE(HRESULT, POINTER(c_long)) \ - (14, 'Status') - _StatusText = WINFUNCTYPE(HRESULT, POINTER(c_void_p)) \ - (15, 'StatusText') - _ResponseText = WINFUNCTYPE(HRESULT, POINTER(c_void_p)) \ - (16, 'ResponseText') - _ResponseBody = WINFUNCTYPE(HRESULT, POINTER(VARIANT)) \ - (17, 'ResponseBody') - _ResponseStream = WINFUNCTYPE(HRESULT, POINTER(VARIANT)) \ - (18, 'ResponseStream') - _WaitForResponse = WINFUNCTYPE(HRESULT, VARIANT, POINTER(c_ushort)) \ - (21, 'WaitForResponse') - _Abort = WINFUNCTYPE(HRESULT) \ - (22, 'Abort') - _SetTimeouts = WINFUNCTYPE(HRESULT, c_long, c_long, c_long, c_long) \ - (23, 'SetTimeouts') - _SetClientCertificate = WINFUNCTYPE(HRESULT, BSTR) \ - (24, 'SetClientCertificate') - - def open(self, method, url): - ''' - Opens the request. - - method: the request VERB 'GET', 'POST', etc. - url: the url to connect - ''' - _WinHttpRequest._SetTimeouts(self, 0, 65000, 65000, 65000) - - flag = VARIANT.create_bool_false() - _method = BSTR(method) - _url = BSTR(url) - _WinHttpRequest._Open(self, _method, _url, flag) - - def set_request_header(self, name, value): - ''' Sets the request header. ''' - - _name = BSTR(name) - _value = BSTR(value) - _WinHttpRequest._SetRequestHeader(self, _name, _value) - - def get_all_response_headers(self): - ''' Gets back all response headers. ''' - - bstr_headers = c_void_p() - _WinHttpRequest._GetAllResponseHeaders(self, byref(bstr_headers)) - bstr_headers = ctypes.cast(bstr_headers, c_wchar_p) - headers = bstr_headers.value - _SysFreeString(bstr_headers) - return headers - - def send(self, request=None): - ''' Sends the request body. ''' - - # Sends VT_EMPTY if it is GET, HEAD request. - if request is None: - var_empty = VARIANT.create_empty() - _WinHttpRequest._Send(self, var_empty) - else: # Sends request body as SAFEArray. - _request = VARIANT.create_safearray_from_str(request) - _WinHttpRequest._Send(self, _request) - - def status(self): - ''' Gets status of response. ''' - - status = c_long() - _WinHttpRequest._Status(self, byref(status)) - return int(status.value) - - def status_text(self): - ''' Gets status text of response. 
''' - - bstr_status_text = c_void_p() - _WinHttpRequest._StatusText(self, byref(bstr_status_text)) - bstr_status_text = ctypes.cast(bstr_status_text, c_wchar_p) - status_text = bstr_status_text.value - _SysFreeString(bstr_status_text) - return status_text - - def response_body(self): - ''' - Gets response body as a SAFEARRAY and converts the SAFEARRAY to str. - If it is an xml file, it always contains 3 characters before <?xml, - so we remove them. - ''' - var_respbody = VARIANT() - _WinHttpRequest._ResponseBody(self, byref(var_respbody)) - if var_respbody.is_safearray_of_bytes(): - respbody = var_respbody.str_from_safearray() - if respbody[3:].startswith(b'<?xml') and\ - respbody.startswith(b'\xef\xbb\xbf'): - respbody = respbody[3:] - return respbody - else: - return '' - - def set_client_certificate(self, certificate): - '''Sets client certificate for the request. ''' - _certificate = BSTR(certificate) - _WinHttpRequest._SetClientCertificate(self, _certificate) - - def set_tunnel(self, host, port): - ''' Sets up the host and the port for the HTTP CONNECT Tunnelling.''' - url = host - if port: - url = url + u':' + port - - var_host = VARIANT.create_bstr_from_str(url) - var_empty = VARIANT.create_empty() - - _WinHttpRequest._SetProxy( - self, HTTPREQUEST_PROXYSETTING_PROXY, var_host, var_empty) - - def set_proxy_credentials(self, user, password): - _WinHttpRequest._SetCredentials( - self, BSTR(user), BSTR(password), - HTTPREQUEST_SETCREDENTIALS_FOR_PROXY) - - def __del__(self): - if self.value is not None: - _WinHttpRequest._Release(self) - - -class _Response(object): - - ''' Response class corresponding to the response returned from httplib - HTTPConnection. ''' - - def __init__(self, _status, _status_text, _length, _headers, _respbody): - self.status = _status - self.reason = _status_text - self.length = _length - self.headers = _headers - self.respbody = _respbody - - def getheaders(self): - '''Returns response headers.''' - return self.headers - - def read(self, _length): - '''Returns resonse body. ''' - return self.respbody[:_length] - - -class _HTTPConnection(object): - - ''' Class corresponding to httplib HTTPConnection class. ''' - - def __init__(self, host, cert_file=None, key_file=None, protocol='http'): - ''' initialize the IWinHttpWebRequest Com Object.''' - self.host = unicode(host) - self.cert_file = cert_file - self._httprequest = _WinHttpRequest() - self.protocol = protocol - clsid = GUID('{2087C2F4-2CEF-4953-A8AB-66779B670495}') - iid = GUID('{016FE2EC-B2C8-45F8-B23B-39E53A75396B}') - _CoInitialize(None) - _CoCreateInstance(byref(clsid), 0, 1, byref(iid), - byref(self._httprequest)) - - def close(self): - pass - - def set_tunnel(self, host, port=None, headers=None): - ''' Sets up the host and the port for the HTTP CONNECT Tunnelling. ''' - self._httprequest.set_tunnel(unicode(host), unicode(str(port))) - - def set_proxy_credentials(self, user, password): - self._httprequest.set_proxy_credentials( - unicode(user), unicode(password)) - - def putrequest(self, method, uri): - ''' Connects to host and sends the request. ''' - - protocol = unicode(self.protocol + '://') - url = protocol + self.host + unicode(uri) - self._httprequest.open(unicode(method), url) - - # sets certificate for the connection if cert_file is set. - if self.cert_file is not None: - self._httprequest.set_client_certificate(unicode(self.cert_file)) - - def putheader(self, name, value): - ''' Sends the headers of request. 
''' - if sys.version_info < (3,): - name = str(name).decode('utf-8') - value = str(value).decode('utf-8') - self._httprequest.set_request_header(name, value) - - def endheaders(self): - ''' No operation. Exists only to provide the same interface of httplib - HTTPConnection.''' - pass - - def send(self, request_body): - ''' Sends request body. ''' - if not request_body: - self._httprequest.send() - else: - self._httprequest.send(request_body) - - def getresponse(self): - ''' Gets the response and generates the _Response object''' - status = self._httprequest.status() - status_text = self._httprequest.status_text() - - resp_headers = self._httprequest.get_all_response_headers() - fixed_headers = [] - for resp_header in resp_headers.split('\n'): - if (resp_header.startswith('\t') or\ - resp_header.startswith(' ')) and fixed_headers: - # append to previous header - fixed_headers[-1] += resp_header - else: - fixed_headers.append(resp_header) - - headers = [] - for resp_header in fixed_headers: - if ':' in resp_header: - pos = resp_header.find(':') - headers.append( - (resp_header[:pos].lower(), resp_header[pos + 1:].strip())) - - body = self._httprequest.response_body() - length = len(body) - - return _Response(status, status_text, length, headers, body) +#------------------------------------------------------------------------- +# Copyright (c) Microsoft. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#-------------------------------------------------------------------------- +from ctypes import ( + c_void_p, + c_long, + c_ulong, + c_longlong, + c_ulonglong, + c_short, + c_ushort, + c_wchar_p, + c_byte, + byref, + Structure, + Union, + POINTER, + WINFUNCTYPE, + HRESULT, + oledll, + WinDLL, + ) +import ctypes +import sys + +if sys.version_info >= (3,): + def unicode(text): + return text + +#------------------------------------------------------------------------------ +# Constants that are used in COM operations +VT_EMPTY = 0 +VT_NULL = 1 +VT_I2 = 2 +VT_I4 = 3 +VT_BSTR = 8 +VT_BOOL = 11 +VT_I1 = 16 +VT_UI1 = 17 +VT_UI2 = 18 +VT_UI4 = 19 +VT_I8 = 20 +VT_UI8 = 21 +VT_ARRAY = 8192 + +HTTPREQUEST_PROXYSETTING_PROXY = 2 +HTTPREQUEST_SETCREDENTIALS_FOR_PROXY = 1 + +HTTPREQUEST_PROXY_SETTING = c_long +HTTPREQUEST_SETCREDENTIALS_FLAGS = c_long +#------------------------------------------------------------------------------ +# Com related APIs that are used. 
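# The bindings that follow pin restype/argtypes on raw OLE Automation entry
# points so ctypes marshals arguments correctly. A tiny standalone,
# Windows-only sketch of the same pattern; the exported names match real
# oleaut32 functions, the test string is arbitrary:
import ctypes
from ctypes import WinDLL, c_void_p, c_wchar_p

oleaut32 = WinDLL('oleaut32')
SysAllocString = oleaut32.SysAllocString
SysAllocString.restype = c_void_p        # a BSTR comes back as a raw pointer
SysAllocString.argtypes = [c_wchar_p]    # accepts a Python unicode string
SysFreeString = oleaut32.SysFreeString
SysFreeString.argtypes = [c_void_p]

bstr = SysAllocString(u'hello')
assert ctypes.wstring_at(bstr) == u'hello'
SysFreeString(bstr)                      # every SysAllocString needs a free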
+_ole32 = oledll.ole32 +_oleaut32 = WinDLL('oleaut32') +_CLSIDFromString = _ole32.CLSIDFromString +_CoInitialize = _ole32.CoInitialize +_CoInitialize.argtypes = [c_void_p] + +_CoCreateInstance = _ole32.CoCreateInstance + +_SysAllocString = _oleaut32.SysAllocString +_SysAllocString.restype = c_void_p +_SysAllocString.argtypes = [c_wchar_p] + +_SysFreeString = _oleaut32.SysFreeString +_SysFreeString.argtypes = [c_void_p] + +# SAFEARRAY* +# SafeArrayCreateVector(_In_ VARTYPE vt,_In_ LONG lLbound,_In_ ULONG +# cElements); +_SafeArrayCreateVector = _oleaut32.SafeArrayCreateVector +_SafeArrayCreateVector.restype = c_void_p +_SafeArrayCreateVector.argtypes = [c_ushort, c_long, c_ulong] + +# HRESULT +# SafeArrayAccessData(_In_ SAFEARRAY *psa, _Out_ void **ppvData); +_SafeArrayAccessData = _oleaut32.SafeArrayAccessData +_SafeArrayAccessData.argtypes = [c_void_p, POINTER(c_void_p)] + +# HRESULT +# SafeArrayUnaccessData(_In_ SAFEARRAY *psa); +_SafeArrayUnaccessData = _oleaut32.SafeArrayUnaccessData +_SafeArrayUnaccessData.argtypes = [c_void_p] + +# HRESULT +# SafeArrayGetUBound(_In_ SAFEARRAY *psa, _In_ UINT nDim, _Out_ LONG +# *plUbound); +_SafeArrayGetUBound = _oleaut32.SafeArrayGetUBound +_SafeArrayGetUBound.argtypes = [c_void_p, c_ulong, POINTER(c_long)] + + +#------------------------------------------------------------------------------ + +class BSTR(c_wchar_p): + + ''' BSTR class in python. ''' + + def __init__(self, value): + super(BSTR, self).__init__(_SysAllocString(value)) + + def __del__(self): + _SysFreeString(self) + + +class VARIANT(Structure): + + ''' + VARIANT structure in python. Does not match the definition in + MSDN exactly & it is only mapping the used fields. Field names are also + slighty different. + ''' + + class _tagData(Union): + + class _tagRecord(Structure): + _fields_ = [('pvoid', c_void_p), ('precord', c_void_p)] + + _fields_ = [('llval', c_longlong), + ('ullval', c_ulonglong), + ('lval', c_long), + ('ulval', c_ulong), + ('ival', c_short), + ('boolval', c_ushort), + ('bstrval', BSTR), + ('parray', c_void_p), + ('record', _tagRecord)] + + _fields_ = [('vt', c_ushort), + ('wReserved1', c_ushort), + ('wReserved2', c_ushort), + ('wReserved3', c_ushort), + ('vdata', _tagData)] + + @staticmethod + def create_empty(): + variant = VARIANT() + variant.vt = VT_EMPTY + variant.vdata.llval = 0 + return variant + + @staticmethod + def create_safearray_from_str(text): + variant = VARIANT() + variant.vt = VT_ARRAY | VT_UI1 + + length = len(text) + variant.vdata.parray = _SafeArrayCreateVector(VT_UI1, 0, length) + pvdata = c_void_p() + _SafeArrayAccessData(variant.vdata.parray, byref(pvdata)) + ctypes.memmove(pvdata, text, length) + _SafeArrayUnaccessData(variant.vdata.parray) + + return variant + + @staticmethod + def create_bstr_from_str(text): + variant = VARIANT() + variant.vt = VT_BSTR + variant.vdata.bstrval = BSTR(text) + return variant + + @staticmethod + def create_bool_false(): + variant = VARIANT() + variant.vt = VT_BOOL + variant.vdata.boolval = 0 + return variant + + def is_safearray_of_bytes(self): + return self.vt == VT_ARRAY | VT_UI1 + + def str_from_safearray(self): + assert self.vt == VT_ARRAY | VT_UI1 + pvdata = c_void_p() + count = c_long() + _SafeArrayGetUBound(self.vdata.parray, 1, byref(count)) + count = c_long(count.value + 1) + _SafeArrayAccessData(self.vdata.parray, byref(pvdata)) + text = ctypes.string_at(pvdata, count) + _SafeArrayUnaccessData(self.vdata.parray) + return text + + def __del__(self): + _VariantClear(self) + +# HRESULT VariantClear(_Inout_ 
VARIANTARG *pvarg); +_VariantClear = _oleaut32.VariantClear +_VariantClear.argtypes = [POINTER(VARIANT)] + + +class GUID(Structure): + + ''' GUID structure in python. ''' + + _fields_ = [("data1", c_ulong), + ("data2", c_ushort), + ("data3", c_ushort), + ("data4", c_byte * 8)] + + def __init__(self, name=None): + if name is not None: + _CLSIDFromString(unicode(name), byref(self)) + + +class _WinHttpRequest(c_void_p): + + ''' + Maps the Com API to Python class functions. Not all methods in + IWinHttpWebRequest are mapped - only the methods we use. + ''' + _AddRef = WINFUNCTYPE(c_long) \ + (1, 'AddRef') + _Release = WINFUNCTYPE(c_long) \ + (2, 'Release') + _SetProxy = WINFUNCTYPE(HRESULT, + HTTPREQUEST_PROXY_SETTING, + VARIANT, + VARIANT) \ + (7, 'SetProxy') + _SetCredentials = WINFUNCTYPE(HRESULT, + BSTR, + BSTR, + HTTPREQUEST_SETCREDENTIALS_FLAGS) \ + (8, 'SetCredentials') + _Open = WINFUNCTYPE(HRESULT, BSTR, BSTR, VARIANT) \ + (9, 'Open') + _SetRequestHeader = WINFUNCTYPE(HRESULT, BSTR, BSTR) \ + (10, 'SetRequestHeader') + _GetResponseHeader = WINFUNCTYPE(HRESULT, BSTR, POINTER(c_void_p)) \ + (11, 'GetResponseHeader') + _GetAllResponseHeaders = WINFUNCTYPE(HRESULT, POINTER(c_void_p)) \ + (12, 'GetAllResponseHeaders') + _Send = WINFUNCTYPE(HRESULT, VARIANT) \ + (13, 'Send') + _Status = WINFUNCTYPE(HRESULT, POINTER(c_long)) \ + (14, 'Status') + _StatusText = WINFUNCTYPE(HRESULT, POINTER(c_void_p)) \ + (15, 'StatusText') + _ResponseText = WINFUNCTYPE(HRESULT, POINTER(c_void_p)) \ + (16, 'ResponseText') + _ResponseBody = WINFUNCTYPE(HRESULT, POINTER(VARIANT)) \ + (17, 'ResponseBody') + _ResponseStream = WINFUNCTYPE(HRESULT, POINTER(VARIANT)) \ + (18, 'ResponseStream') + _WaitForResponse = WINFUNCTYPE(HRESULT, VARIANT, POINTER(c_ushort)) \ + (21, 'WaitForResponse') + _Abort = WINFUNCTYPE(HRESULT) \ + (22, 'Abort') + _SetTimeouts = WINFUNCTYPE(HRESULT, c_long, c_long, c_long, c_long) \ + (23, 'SetTimeouts') + _SetClientCertificate = WINFUNCTYPE(HRESULT, BSTR) \ + (24, 'SetClientCertificate') + + def open(self, method, url): + ''' + Opens the request. + + method: the request VERB 'GET', 'POST', etc. + url: the url to connect + ''' + _WinHttpRequest._SetTimeouts(self, 0, 65000, 65000, 65000) + + flag = VARIANT.create_bool_false() + _method = BSTR(method) + _url = BSTR(url) + _WinHttpRequest._Open(self, _method, _url, flag) + + def set_request_header(self, name, value): + ''' Sets the request header. ''' + + _name = BSTR(name) + _value = BSTR(value) + _WinHttpRequest._SetRequestHeader(self, _name, _value) + + def get_all_response_headers(self): + ''' Gets back all response headers. ''' + + bstr_headers = c_void_p() + _WinHttpRequest._GetAllResponseHeaders(self, byref(bstr_headers)) + bstr_headers = ctypes.cast(bstr_headers, c_wchar_p) + headers = bstr_headers.value + _SysFreeString(bstr_headers) + return headers + + def send(self, request=None): + ''' Sends the request body. ''' + + # Sends VT_EMPTY if it is GET, HEAD request. + if request is None: + var_empty = VARIANT.create_empty() + _WinHttpRequest._Send(self, var_empty) + else: # Sends request body as SAFEArray. + _request = VARIANT.create_safearray_from_str(request) + _WinHttpRequest._Send(self, _request) + + def status(self): + ''' Gets status of response. ''' + + status = c_long() + _WinHttpRequest._Status(self, byref(status)) + return int(status.value) + + def status_text(self): + ''' Gets status text of response. 
''' + + bstr_status_text = c_void_p() + _WinHttpRequest._StatusText(self, byref(bstr_status_text)) + bstr_status_text = ctypes.cast(bstr_status_text, c_wchar_p) + status_text = bstr_status_text.value + _SysFreeString(bstr_status_text) + return status_text + + def response_body(self): + ''' + Gets response body as a SAFEARRAY and converts the SAFEARRAY to str. + If it is an xml file, it always contains 3 characters before <?xml, + so we remove them. + ''' + var_respbody = VARIANT() + _WinHttpRequest._ResponseBody(self, byref(var_respbody)) + if var_respbody.is_safearray_of_bytes(): + respbody = var_respbody.str_from_safearray() + if respbody[3:].startswith(b'<?xml') and\ + respbody.startswith(b'\xef\xbb\xbf'): + respbody = respbody[3:] + return respbody + else: + return '' + + def set_client_certificate(self, certificate): + '''Sets client certificate for the request. ''' + _certificate = BSTR(certificate) + _WinHttpRequest._SetClientCertificate(self, _certificate) + + def set_tunnel(self, host, port): + ''' Sets up the host and the port for the HTTP CONNECT Tunnelling.''' + url = host + if port: + url = url + u':' + port + + var_host = VARIANT.create_bstr_from_str(url) + var_empty = VARIANT.create_empty() + + _WinHttpRequest._SetProxy( + self, HTTPREQUEST_PROXYSETTING_PROXY, var_host, var_empty) + + def set_proxy_credentials(self, user, password): + _WinHttpRequest._SetCredentials( + self, BSTR(user), BSTR(password), + HTTPREQUEST_SETCREDENTIALS_FOR_PROXY) + + def __del__(self): + if self.value is not None: + _WinHttpRequest._Release(self) + + +class _Response(object): + + ''' Response class corresponding to the response returned from httplib + HTTPConnection. ''' + + def __init__(self, _status, _status_text, _length, _headers, _respbody): + self.status = _status + self.reason = _status_text + self.length = _length + self.headers = _headers + self.respbody = _respbody + + def getheaders(self): + '''Returns response headers.''' + return self.headers + + def read(self, _length): + '''Returns resonse body. ''' + return self.respbody[:_length] + + +class _HTTPConnection(object): + + ''' Class corresponding to httplib HTTPConnection class. ''' + + def __init__(self, host, cert_file=None, key_file=None, protocol='http'): + ''' initialize the IWinHttpWebRequest Com Object.''' + self.host = unicode(host) + self.cert_file = cert_file + self._httprequest = _WinHttpRequest() + self.protocol = protocol + clsid = GUID('{2087C2F4-2CEF-4953-A8AB-66779B670495}') + iid = GUID('{016FE2EC-B2C8-45F8-B23B-39E53A75396B}') + _CoInitialize(None) + _CoCreateInstance(byref(clsid), 0, 1, byref(iid), + byref(self._httprequest)) + + def close(self): + pass + + def set_tunnel(self, host, port=None, headers=None): + ''' Sets up the host and the port for the HTTP CONNECT Tunnelling. ''' + self._httprequest.set_tunnel(unicode(host), unicode(str(port))) + + def set_proxy_credentials(self, user, password): + self._httprequest.set_proxy_credentials( + unicode(user), unicode(password)) + + def putrequest(self, method, uri): + ''' Connects to host and sends the request. ''' + + protocol = unicode(self.protocol + '://') + url = protocol + self.host + unicode(uri) + self._httprequest.open(unicode(method), url) + + # sets certificate for the connection if cert_file is set. + if self.cert_file is not None: + self._httprequest.set_client_certificate(unicode(self.cert_file)) + + def putheader(self, name, value): + ''' Sends the headers of request. 
''' + if sys.version_info < (3,): + name = str(name).decode('utf-8') + value = str(value).decode('utf-8') + self._httprequest.set_request_header(name, value) + + def endheaders(self): + ''' No operation. Exists only to provide the same interface of httplib + HTTPConnection.''' + pass + + def send(self, request_body): + ''' Sends request body. ''' + if not request_body: + self._httprequest.send() + else: + self._httprequest.send(request_body) + + def getresponse(self): + ''' Gets the response and generates the _Response object''' + status = self._httprequest.status() + status_text = self._httprequest.status_text() + + resp_headers = self._httprequest.get_all_response_headers() + fixed_headers = [] + for resp_header in resp_headers.split('\n'): + if (resp_header.startswith('\t') or\ + resp_header.startswith(' ')) and fixed_headers: + # append to previous header + fixed_headers[-1] += resp_header + else: + fixed_headers.append(resp_header) + + headers = [] + for resp_header in fixed_headers: + if ':' in resp_header: + pos = resp_header.find(':') + headers.append( + (resp_header[:pos].lower(), resp_header[pos + 1:].strip())) + + body = self._httprequest.response_body() + length = len(body) + + return _Response(status, status_text, length, headers, body) diff --git a/awx/lib/site-packages/azure/servicebus/__init__.py b/awx/lib/site-packages/azure/servicebus/__init__.py index bee85f4efd..ba098f8123 100644 --- a/awx/lib/site-packages/azure/servicebus/__init__.py +++ b/awx/lib/site-packages/azure/servicebus/__init__.py @@ -1,851 +1,852 @@ -#------------------------------------------------------------------------- -# Copyright (c) Microsoft. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#-------------------------------------------------------------------------- -import ast -import sys - -from datetime import datetime -from xml.dom import minidom -from azure import ( - WindowsAzureData, - WindowsAzureError, - xml_escape, - _create_entry, - _general_error_handler, - _get_entry_properties, - _get_child_nodes, - _get_children_from_path, - _get_first_child_node_value, - _ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_DELETE, - _ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_UNLOCK, - _ERROR_QUEUE_NOT_FOUND, - _ERROR_TOPIC_NOT_FOUND, - ) -from azure.http import HTTPError - -# default rule name for subscription -DEFAULT_RULE_NAME = '$Default' - -#----------------------------------------------------------------------------- -# Constants for Azure app environment settings. 
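# getresponse() above re-joins folded HTTP headers (continuation lines that
# begin with a space or tab) onto the preceding header before splitting each
# one on ':'. The same folding in isolation, over an assumed raw header
# string:
raw = 'Content-Type: text/xml\nx-ms-meta: part one\n part two\nDate: today'
fixed = []
for line in raw.split('\n'):
    if line.startswith((' ', '\t')) and fixed:
        fixed[-1] += line          # continuation: glue onto previous header
    else:
        fixed.append(line)
headers = [(h[:h.find(':')].lower(), h[h.find(':') + 1:].strip())
           for h in fixed if ':' in h]
assert headers == [('content-type', 'text/xml'),
                   ('x-ms-meta', 'part one part two'),
                   ('date', 'today')]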
-AZURE_SERVICEBUS_NAMESPACE = 'AZURE_SERVICEBUS_NAMESPACE' -AZURE_SERVICEBUS_ACCESS_KEY = 'AZURE_SERVICEBUS_ACCESS_KEY' -AZURE_SERVICEBUS_ISSUER = 'AZURE_SERVICEBUS_ISSUER' - -# namespace used for converting rules to objects -XML_SCHEMA_NAMESPACE = 'http://www.w3.org/2001/XMLSchema-instance' - - -class Queue(WindowsAzureData): - - ''' Queue class corresponding to Queue Description: - http://msdn.microsoft.com/en-us/library/windowsazure/hh780773''' - - def __init__(self, lock_duration=None, max_size_in_megabytes=None, - requires_duplicate_detection=None, requires_session=None, - default_message_time_to_live=None, - dead_lettering_on_message_expiration=None, - duplicate_detection_history_time_window=None, - max_delivery_count=None, enable_batched_operations=None, - size_in_bytes=None, message_count=None): - - self.lock_duration = lock_duration - self.max_size_in_megabytes = max_size_in_megabytes - self.requires_duplicate_detection = requires_duplicate_detection - self.requires_session = requires_session - self.default_message_time_to_live = default_message_time_to_live - self.dead_lettering_on_message_expiration = \ - dead_lettering_on_message_expiration - self.duplicate_detection_history_time_window = \ - duplicate_detection_history_time_window - self.max_delivery_count = max_delivery_count - self.enable_batched_operations = enable_batched_operations - self.size_in_bytes = size_in_bytes - self.message_count = message_count - - -class Topic(WindowsAzureData): - - ''' Topic class corresponding to Topic Description: - http://msdn.microsoft.com/en-us/library/windowsazure/hh780749. ''' - - def __init__(self, default_message_time_to_live=None, - max_size_in_megabytes=None, requires_duplicate_detection=None, - duplicate_detection_history_time_window=None, - enable_batched_operations=None, size_in_bytes=None): - - self.default_message_time_to_live = default_message_time_to_live - self.max_size_in_megabytes = max_size_in_megabytes - self.requires_duplicate_detection = requires_duplicate_detection - self.duplicate_detection_history_time_window = \ - duplicate_detection_history_time_window - self.enable_batched_operations = enable_batched_operations - self.size_in_bytes = size_in_bytes - - @property - def max_size_in_mega_bytes(self): - import warnings - warnings.warn( - 'This attribute has been changed to max_size_in_megabytes.') - return self.max_size_in_megabytes - - @max_size_in_mega_bytes.setter - def max_size_in_mega_bytes(self, value): - self.max_size_in_megabytes = value - - -class Subscription(WindowsAzureData): - - ''' Subscription class corresponding to Subscription Description: - http://msdn.microsoft.com/en-us/library/windowsazure/hh780763. 
''' - - def __init__(self, lock_duration=None, requires_session=None, - default_message_time_to_live=None, - dead_lettering_on_message_expiration=None, - dead_lettering_on_filter_evaluation_exceptions=None, - enable_batched_operations=None, max_delivery_count=None, - message_count=None): - - self.lock_duration = lock_duration - self.requires_session = requires_session - self.default_message_time_to_live = default_message_time_to_live - self.dead_lettering_on_message_expiration = \ - dead_lettering_on_message_expiration - self.dead_lettering_on_filter_evaluation_exceptions = \ - dead_lettering_on_filter_evaluation_exceptions - self.enable_batched_operations = enable_batched_operations - self.max_delivery_count = max_delivery_count - self.message_count = message_count - - -class Rule(WindowsAzureData): - - ''' Rule class corresponding to Rule Description: - http://msdn.microsoft.com/en-us/library/windowsazure/hh780753. ''' - - def __init__(self, filter_type=None, filter_expression=None, - action_type=None, action_expression=None): - self.filter_type = filter_type - self.filter_expression = filter_expression - self.action_type = action_type - self.action_expression = action_type - - -class Message(WindowsAzureData): - - ''' Message class that used in send message/get mesage apis. ''' - - def __init__(self, body=None, service_bus_service=None, location=None, - custom_properties=None, - type='application/atom+xml;type=entry;charset=utf-8', - broker_properties=None): - self.body = body - self.location = location - self.broker_properties = broker_properties - self.custom_properties = custom_properties - self.type = type - self.service_bus_service = service_bus_service - self._topic_name = None - self._subscription_name = None - self._queue_name = None - - if not service_bus_service: - return - - # if location is set, then extracts the queue name for queue message and - # extracts the topic and subscriptions name if it is topic message. - if location: - if '/subscriptions/' in location: - pos = location.find('/subscriptions/') - pos1 = location.rfind('/', 0, pos - 1) - self._topic_name = location[pos1 + 1:pos] - pos += len('/subscriptions/') - pos1 = location.find('/', pos) - self._subscription_name = location[pos:pos1] - elif '/messages/' in location: - pos = location.find('/messages/') - pos1 = location.rfind('/', 0, pos - 1) - self._queue_name = location[pos1 + 1:pos] - - def delete(self): - ''' Deletes itself if find queue name or topic name and subscription - name. ''' - if self._queue_name: - self.service_bus_service.delete_queue_message( - self._queue_name, - self.broker_properties['SequenceNumber'], - self.broker_properties['LockToken']) - elif self._topic_name and self._subscription_name: - self.service_bus_service.delete_subscription_message( - self._topic_name, - self._subscription_name, - self.broker_properties['SequenceNumber'], - self.broker_properties['LockToken']) - else: - raise WindowsAzureError(_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_DELETE) - - def unlock(self): - ''' Unlocks itself if find queue name or topic name and subscription - name. 
''' - if self._queue_name: - self.service_bus_service.unlock_queue_message( - self._queue_name, - self.broker_properties['SequenceNumber'], - self.broker_properties['LockToken']) - elif self._topic_name and self._subscription_name: - self.service_bus_service.unlock_subscription_message( - self._topic_name, - self._subscription_name, - self.broker_properties['SequenceNumber'], - self.broker_properties['LockToken']) - else: - raise WindowsAzureError(_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_UNLOCK) - - def add_headers(self, request): - ''' add addtional headers to request for message request.''' - - # Adds custom properties - if self.custom_properties: - for name, value in self.custom_properties.items(): - if sys.version_info < (3,) and isinstance(value, unicode): - request.headers.append( - (name, '"' + value.encode('utf-8') + '"')) - elif isinstance(value, str): - request.headers.append((name, '"' + str(value) + '"')) - elif isinstance(value, datetime): - request.headers.append( - (name, '"' + value.strftime('%a, %d %b %Y %H:%M:%S GMT') + '"')) - else: - request.headers.append((name, str(value).lower())) - - # Adds content-type - request.headers.append(('Content-Type', self.type)) - - # Adds BrokerProperties - if self.broker_properties: - request.headers.append( - ('BrokerProperties', str(self.broker_properties))) - - return request.headers - - -def _create_message(response, service_instance): - ''' Create message from response. - - response: response from service bus cloud server. - service_instance: the service bus client. - ''' - respbody = response.body - custom_properties = {} - broker_properties = None - message_type = None - message_location = None - - # gets all information from respheaders. - for name, value in response.headers: - if name.lower() == 'brokerproperties': - broker_properties = ast.literal_eval(value) - elif name.lower() == 'content-type': - message_type = value - elif name.lower() == 'location': - message_location = value - elif name.lower() not in ['content-type', - 'brokerproperties', - 'transfer-encoding', - 'server', - 'location', - 'date']: - if '"' in value: - value = value[1:-1] - try: - custom_properties[name] = datetime.strptime( - value, '%a, %d %b %Y %H:%M:%S GMT') - except ValueError: - custom_properties[name] = value - else: # only int, float or boolean - if value.lower() == 'true': - custom_properties[name] = True - elif value.lower() == 'false': - custom_properties[name] = False - # int('3.1') doesn't work so need to get float('3.14') first - elif str(int(float(value))) == value: - custom_properties[name] = int(value) - else: - custom_properties[name] = float(value) - - if message_type == None: - message = Message( - respbody, service_instance, message_location, custom_properties, - 'application/atom+xml;type=entry;charset=utf-8', broker_properties) - else: - message = Message(respbody, service_instance, message_location, - custom_properties, message_type, broker_properties) - return message - -# convert functions - - -def _convert_response_to_rule(response): - return _convert_xml_to_rule(response.body) - - -def _convert_xml_to_rule(xmlstr): - ''' Converts response xml to rule object. 
- - The format of xml for rule: -<entry xmlns='http://www.w3.org/2005/Atom'> -<content type='application/xml'> -<RuleDescription - xmlns:i="http://www.w3.org/2001/XMLSchema-instance" - xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect"> - <Filter i:type="SqlFilterExpression"> - <SqlExpression>MyProperty='XYZ'</SqlExpression> - </Filter> - <Action i:type="SqlFilterAction"> - <SqlExpression>set MyProperty2 = 'ABC'</SqlExpression> - </Action> -</RuleDescription> -</content> -</entry> - ''' - xmldoc = minidom.parseString(xmlstr) - rule = Rule() - - for rule_desc in _get_children_from_path(xmldoc, - 'entry', - 'content', - 'RuleDescription'): - for xml_filter in _get_child_nodes(rule_desc, 'Filter'): - filter_type = xml_filter.getAttributeNS( - XML_SCHEMA_NAMESPACE, 'type') - setattr(rule, 'filter_type', str(filter_type)) - if xml_filter.childNodes: - - for expr in _get_child_nodes(xml_filter, 'SqlExpression'): - setattr(rule, 'filter_expression', - expr.firstChild.nodeValue) - - for xml_action in _get_child_nodes(rule_desc, 'Action'): - action_type = xml_action.getAttributeNS( - XML_SCHEMA_NAMESPACE, 'type') - setattr(rule, 'action_type', str(action_type)) - if xml_action.childNodes: - action_expression = xml_action.childNodes[0].firstChild - if action_expression: - setattr(rule, 'action_expression', - action_expression.nodeValue) - - # extract id, updated and name value from feed entry and set them of rule. - for name, value in _get_entry_properties(xmlstr, True, '/rules').items(): - setattr(rule, name, value) - - return rule - - -def _convert_response_to_queue(response): - return _convert_xml_to_queue(response.body) - - -def _parse_bool(value): - if value.lower() == 'true': - return True - return False - - -def _convert_xml_to_queue(xmlstr): - ''' Converts xml response to queue object. - - The format of xml response for queue: -<QueueDescription - xmlns=\"http://schemas.microsoft.com/netservices/2010/10/servicebus/connect\"> - <MaxSizeInBytes>10000</MaxSizeInBytes> - <DefaultMessageTimeToLive>PT5M</DefaultMessageTimeToLive> - <LockDuration>PT2M</LockDuration> - <RequiresGroupedReceives>False</RequiresGroupedReceives> - <SupportsDuplicateDetection>False</SupportsDuplicateDetection> - ... -</QueueDescription> - - ''' - xmldoc = minidom.parseString(xmlstr) - queue = Queue() - - invalid_queue = True - # get node for each attribute in Queue class, if nothing found then the - # response is not valid xml for Queue. 
- for desc in _get_children_from_path(xmldoc, - 'entry', - 'content', - 'QueueDescription'): - node_value = _get_first_child_node_value(desc, 'LockDuration') - if node_value is not None: - queue.lock_duration = node_value - invalid_queue = False - - node_value = _get_first_child_node_value(desc, 'MaxSizeInMegabytes') - if node_value is not None: - queue.max_size_in_megabytes = int(node_value) - invalid_queue = False - - node_value = _get_first_child_node_value( - desc, 'RequiresDuplicateDetection') - if node_value is not None: - queue.requires_duplicate_detection = _parse_bool(node_value) - invalid_queue = False - - node_value = _get_first_child_node_value(desc, 'RequiresSession') - if node_value is not None: - queue.requires_session = _parse_bool(node_value) - invalid_queue = False - - node_value = _get_first_child_node_value( - desc, 'DefaultMessageTimeToLive') - if node_value is not None: - queue.default_message_time_to_live = node_value - invalid_queue = False - - node_value = _get_first_child_node_value( - desc, 'DeadLetteringOnMessageExpiration') - if node_value is not None: - queue.dead_lettering_on_message_expiration = _parse_bool(node_value) - invalid_queue = False - - node_value = _get_first_child_node_value( - desc, 'DuplicateDetectionHistoryTimeWindow') - if node_value is not None: - queue.duplicate_detection_history_time_window = node_value - invalid_queue = False - - node_value = _get_first_child_node_value( - desc, 'EnableBatchedOperations') - if node_value is not None: - queue.enable_batched_operations = _parse_bool(node_value) - invalid_queue = False - - node_value = _get_first_child_node_value(desc, 'MaxDeliveryCount') - if node_value is not None: - queue.max_delivery_count = int(node_value) - invalid_queue = False - - node_value = _get_first_child_node_value(desc, 'MessageCount') - if node_value is not None: - queue.message_count = int(node_value) - invalid_queue = False - - node_value = _get_first_child_node_value(desc, 'SizeInBytes') - if node_value is not None: - queue.size_in_bytes = int(node_value) - invalid_queue = False - - if invalid_queue: - raise WindowsAzureError(_ERROR_QUEUE_NOT_FOUND) - - # extract id, updated and name value from feed entry and set them of queue. - for name, value in _get_entry_properties(xmlstr, True).items(): - setattr(queue, name, value) - - return queue - - -def _convert_response_to_topic(response): - return _convert_xml_to_topic(response.body) - - -def _convert_xml_to_topic(xmlstr): - '''Converts xml response to topic - - The xml format for topic: -<entry xmlns='http://www.w3.org/2005/Atom'> - <content type='application/xml'> - <TopicDescription - xmlns:i="http://www.w3.org/2001/XMLSchema-instance" - xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect"> - <DefaultMessageTimeToLive>P10675199DT2H48M5.4775807S</DefaultMessageTimeToLive> - <MaxSizeInMegabytes>1024</MaxSizeInMegabytes> - <RequiresDuplicateDetection>false</RequiresDuplicateDetection> - <DuplicateDetectionHistoryTimeWindow>P7D</DuplicateDetectionHistoryTimeWindow> - <DeadLetteringOnFilterEvaluationExceptions>true</DeadLetteringOnFilterEvaluationExceptions> - </TopicDescription> - </content> -</entry> - ''' - xmldoc = minidom.parseString(xmlstr) - topic = Topic() - - invalid_topic = True - - # get node for each attribute in Topic class, if nothing found then the - # response is not valid xml for Topic. 
- for desc in _get_children_from_path(xmldoc, - 'entry', - 'content', - 'TopicDescription'): - invalid_topic = True - node_value = _get_first_child_node_value( - desc, 'DefaultMessageTimeToLive') - if node_value is not None: - topic.default_message_time_to_live = node_value - invalid_topic = False - node_value = _get_first_child_node_value(desc, 'MaxSizeInMegabytes') - if node_value is not None: - topic.max_size_in_megabytes = int(node_value) - invalid_topic = False - node_value = _get_first_child_node_value( - desc, 'RequiresDuplicateDetection') - if node_value is not None: - topic.requires_duplicate_detection = _parse_bool(node_value) - invalid_topic = False - node_value = _get_first_child_node_value( - desc, 'DuplicateDetectionHistoryTimeWindow') - if node_value is not None: - topic.duplicate_detection_history_time_window = node_value - invalid_topic = False - node_value = _get_first_child_node_value( - desc, 'EnableBatchedOperations') - if node_value is not None: - topic.enable_batched_operations = _parse_bool(node_value) - invalid_topic = False - node_value = _get_first_child_node_value(desc, 'SizeInBytes') - if node_value is not None: - topic.size_in_bytes = int(node_value) - invalid_topic = False - - if invalid_topic: - raise WindowsAzureError(_ERROR_TOPIC_NOT_FOUND) - - # extract id, updated and name value from feed entry and set them of topic. - for name, value in _get_entry_properties(xmlstr, True).items(): - setattr(topic, name, value) - return topic - - -def _convert_response_to_subscription(response): - return _convert_xml_to_subscription(response.body) - - -def _convert_xml_to_subscription(xmlstr): - '''Converts xml response to subscription - - The xml format for subscription: -<entry xmlns='http://www.w3.org/2005/Atom'> - <content type='application/xml'> - <SubscriptionDescription - xmlns:i="http://www.w3.org/2001/XMLSchema-instance" - xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect"> - <LockDuration>PT5M</LockDuration> - <RequiresSession>false</RequiresSession> - <DefaultMessageTimeToLive>P10675199DT2H48M5.4775807S</DefaultMessageTimeToLive> - <DeadLetteringOnMessageExpiration>false</DeadLetteringOnMessageExpiration> - <DeadLetteringOnFilterEvaluationExceptions>true</DeadLetteringOnFilterEvaluationExceptions> - </SubscriptionDescription> - </content> -</entry> - ''' - xmldoc = minidom.parseString(xmlstr) - subscription = Subscription() - - for desc in _get_children_from_path(xmldoc, - 'entry', - 'content', - 'SubscriptionDescription'): - node_value = _get_first_child_node_value(desc, 'LockDuration') - if node_value is not None: - subscription.lock_duration = node_value - - node_value = _get_first_child_node_value( - desc, 'RequiresSession') - if node_value is not None: - subscription.requires_session = _parse_bool(node_value) - - node_value = _get_first_child_node_value( - desc, 'DefaultMessageTimeToLive') - if node_value is not None: - subscription.default_message_time_to_live = node_value - - node_value = _get_first_child_node_value( - desc, 'DeadLetteringOnFilterEvaluationExceptions') - if node_value is not None: - subscription.dead_lettering_on_filter_evaluation_exceptions = \ - _parse_bool(node_value) - - node_value = _get_first_child_node_value( - desc, 'DeadLetteringOnMessageExpiration') - if node_value is not None: - subscription.dead_lettering_on_message_expiration = \ - _parse_bool(node_value) - - node_value = _get_first_child_node_value( - desc, 'EnableBatchedOperations') - if node_value is not None: - 
subscription.enable_batched_operations = _parse_bool(node_value) - - node_value = _get_first_child_node_value( - desc, 'MaxDeliveryCount') - if node_value is not None: - subscription.max_delivery_count = int(node_value) - - node_value = _get_first_child_node_value( - desc, 'MessageCount') - if node_value is not None: - subscription.message_count = int(node_value) - - for name, value in _get_entry_properties(xmlstr, - True, - '/subscriptions').items(): - setattr(subscription, name, value) - - return subscription - - -def _convert_subscription_to_xml(subscription): - ''' - Converts a subscription object to xml to send. The order of each field of - subscription in xml is very important so we can't simple call - convert_class_to_xml. - - subscription: the subsciption object to be converted. - ''' - - subscription_body = '<SubscriptionDescription xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">' - if subscription: - if subscription.lock_duration is not None: - subscription_body += ''.join( - ['<LockDuration>', - str(subscription.lock_duration), - '</LockDuration>']) - - if subscription.requires_session is not None: - subscription_body += ''.join( - ['<RequiresSession>', - str(subscription.requires_session).lower(), - '</RequiresSession>']) - - if subscription.default_message_time_to_live is not None: - subscription_body += ''.join( - ['<DefaultMessageTimeToLive>', - str(subscription.default_message_time_to_live), - '</DefaultMessageTimeToLive>']) - - if subscription.dead_lettering_on_message_expiration is not None: - subscription_body += ''.join( - ['<DeadLetteringOnMessageExpiration>', - str(subscription.dead_lettering_on_message_expiration).lower(), - '</DeadLetteringOnMessageExpiration>']) - - if subscription.dead_lettering_on_filter_evaluation_exceptions is not None: - subscription_body += ''.join( - ['<DeadLetteringOnFilterEvaluationExceptions>', - str(subscription.dead_lettering_on_filter_evaluation_exceptions).lower(), - '</DeadLetteringOnFilterEvaluationExceptions>']) - - if subscription.enable_batched_operations is not None: - subscription_body += ''.join( - ['<EnableBatchedOperations>', - str(subscription.enable_batched_operations).lower(), - '</EnableBatchedOperations>']) - - if subscription.max_delivery_count is not None: - subscription_body += ''.join( - ['<MaxDeliveryCount>', - str(subscription.max_delivery_count), - '</MaxDeliveryCount>']) - - if subscription.message_count is not None: - subscription_body += ''.join( - ['<MessageCount>', - str(subscription.message_count), - '</MessageCount>']) - - subscription_body += '</SubscriptionDescription>' - return _create_entry(subscription_body) - - -def _convert_rule_to_xml(rule): - ''' - Converts a rule object to xml to send. The order of each field of rule - in xml is very important so we cann't simple call convert_class_to_xml. - - rule: the rule object to be converted. 
- ''' - rule_body = '<RuleDescription xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">' - if rule: - if rule.filter_type: - rule_body += ''.join( - ['<Filter i:type="', - xml_escape(rule.filter_type), - '">']) - if rule.filter_type == 'CorrelationFilter': - rule_body += ''.join( - ['<CorrelationId>', - xml_escape(rule.filter_expression), - '</CorrelationId>']) - else: - rule_body += ''.join( - ['<SqlExpression>', - xml_escape(rule.filter_expression), - '</SqlExpression>']) - rule_body += '<CompatibilityLevel>20</CompatibilityLevel>' - rule_body += '</Filter>' - if rule.action_type: - rule_body += ''.join( - ['<Action i:type="', - xml_escape(rule.action_type), - '">']) - if rule.action_type == 'SqlRuleAction': - rule_body += ''.join( - ['<SqlExpression>', - xml_escape(rule.action_expression), - '</SqlExpression>']) - rule_body += '<CompatibilityLevel>20</CompatibilityLevel>' - rule_body += '</Action>' - rule_body += '</RuleDescription>' - - return _create_entry(rule_body) - - -def _convert_topic_to_xml(topic): - ''' - Converts a topic object to xml to send. The order of each field of topic - in xml is very important so we cann't simple call convert_class_to_xml. - - topic: the topic object to be converted. - ''' - - topic_body = '<TopicDescription xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">' - if topic: - if topic.default_message_time_to_live is not None: - topic_body += ''.join( - ['<DefaultMessageTimeToLive>', - str(topic.default_message_time_to_live), - '</DefaultMessageTimeToLive>']) - - if topic.max_size_in_megabytes is not None: - topic_body += ''.join( - ['<MaxSizeInMegabytes>', - str(topic.max_size_in_megabytes), - '</MaxSizeInMegabytes>']) - - if topic.requires_duplicate_detection is not None: - topic_body += ''.join( - ['<RequiresDuplicateDetection>', - str(topic.requires_duplicate_detection).lower(), - '</RequiresDuplicateDetection>']) - - if topic.duplicate_detection_history_time_window is not None: - topic_body += ''.join( - ['<DuplicateDetectionHistoryTimeWindow>', - str(topic.duplicate_detection_history_time_window), - '</DuplicateDetectionHistoryTimeWindow>']) - - if topic.enable_batched_operations is not None: - topic_body += ''.join( - ['<EnableBatchedOperations>', - str(topic.enable_batched_operations).lower(), - '</EnableBatchedOperations>']) - - if topic.size_in_bytes is not None: - topic_body += ''.join( - ['<SizeInBytes>', - str(topic.size_in_bytes), - '</SizeInBytes>']) - - topic_body += '</TopicDescription>' - - return _create_entry(topic_body) - - -def _convert_queue_to_xml(queue): - ''' - Converts a queue object to xml to send. The order of each field of queue - in xml is very important so we cann't simple call convert_class_to_xml. - - queue: the queue object to be converted. 
- ''' - queue_body = '<QueueDescription xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">' - if queue: - if queue.lock_duration: - queue_body += ''.join( - ['<LockDuration>', - str(queue.lock_duration), - '</LockDuration>']) - - if queue.max_size_in_megabytes is not None: - queue_body += ''.join( - ['<MaxSizeInMegabytes>', - str(queue.max_size_in_megabytes), - '</MaxSizeInMegabytes>']) - - if queue.requires_duplicate_detection is not None: - queue_body += ''.join( - ['<RequiresDuplicateDetection>', - str(queue.requires_duplicate_detection).lower(), - '</RequiresDuplicateDetection>']) - - if queue.requires_session is not None: - queue_body += ''.join( - ['<RequiresSession>', - str(queue.requires_session).lower(), - '</RequiresSession>']) - - if queue.default_message_time_to_live is not None: - queue_body += ''.join( - ['<DefaultMessageTimeToLive>', - str(queue.default_message_time_to_live), - '</DefaultMessageTimeToLive>']) - - if queue.dead_lettering_on_message_expiration is not None: - queue_body += ''.join( - ['<DeadLetteringOnMessageExpiration>', - str(queue.dead_lettering_on_message_expiration).lower(), - '</DeadLetteringOnMessageExpiration>']) - - if queue.duplicate_detection_history_time_window is not None: - queue_body += ''.join( - ['<DuplicateDetectionHistoryTimeWindow>', - str(queue.duplicate_detection_history_time_window), - '</DuplicateDetectionHistoryTimeWindow>']) - - if queue.max_delivery_count is not None: - queue_body += ''.join( - ['<MaxDeliveryCount>', - str(queue.max_delivery_count), - '</MaxDeliveryCount>']) - - if queue.enable_batched_operations is not None: - queue_body += ''.join( - ['<EnableBatchedOperations>', - str(queue.enable_batched_operations).lower(), - '</EnableBatchedOperations>']) - - if queue.size_in_bytes is not None: - queue_body += ''.join( - ['<SizeInBytes>', - str(queue.size_in_bytes), - '</SizeInBytes>']) - - if queue.message_count is not None: - queue_body += ''.join( - ['<MessageCount>', - str(queue.message_count), - '</MessageCount>']) - - queue_body += '</QueueDescription>' - return _create_entry(queue_body) - - -def _service_bus_error_handler(http_error): - ''' Simple error handler for service bus service. ''' - return _general_error_handler(http_error) - -from azure.servicebus.servicebusservice import ServiceBusService +#------------------------------------------------------------------------- +# Copyright (c) Microsoft. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#-------------------------------------------------------------------------- +import ast +import json +import sys + +from datetime import datetime +from xml.dom import minidom +from azure import ( + WindowsAzureData, + WindowsAzureError, + xml_escape, + _create_entry, + _general_error_handler, + _get_entry_properties, + _get_child_nodes, + _get_children_from_path, + _get_first_child_node_value, + _ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_DELETE, + _ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_UNLOCK, + _ERROR_QUEUE_NOT_FOUND, + _ERROR_TOPIC_NOT_FOUND, + ) +from azure.http import HTTPError + +# default rule name for subscription +DEFAULT_RULE_NAME = '$Default' + +#----------------------------------------------------------------------------- +# Constants for Azure app environment settings. +AZURE_SERVICEBUS_NAMESPACE = 'AZURE_SERVICEBUS_NAMESPACE' +AZURE_SERVICEBUS_ACCESS_KEY = 'AZURE_SERVICEBUS_ACCESS_KEY' +AZURE_SERVICEBUS_ISSUER = 'AZURE_SERVICEBUS_ISSUER' + +# namespace used for converting rules to objects +XML_SCHEMA_NAMESPACE = 'http://www.w3.org/2001/XMLSchema-instance' + + +class Queue(WindowsAzureData): + + ''' Queue class corresponding to Queue Description: + http://msdn.microsoft.com/en-us/library/windowsazure/hh780773''' + + def __init__(self, lock_duration=None, max_size_in_megabytes=None, + requires_duplicate_detection=None, requires_session=None, + default_message_time_to_live=None, + dead_lettering_on_message_expiration=None, + duplicate_detection_history_time_window=None, + max_delivery_count=None, enable_batched_operations=None, + size_in_bytes=None, message_count=None): + + self.lock_duration = lock_duration + self.max_size_in_megabytes = max_size_in_megabytes + self.requires_duplicate_detection = requires_duplicate_detection + self.requires_session = requires_session + self.default_message_time_to_live = default_message_time_to_live + self.dead_lettering_on_message_expiration = \ + dead_lettering_on_message_expiration + self.duplicate_detection_history_time_window = \ + duplicate_detection_history_time_window + self.max_delivery_count = max_delivery_count + self.enable_batched_operations = enable_batched_operations + self.size_in_bytes = size_in_bytes + self.message_count = message_count + + +class Topic(WindowsAzureData): + + ''' Topic class corresponding to Topic Description: + http://msdn.microsoft.com/en-us/library/windowsazure/hh780749. ''' + + def __init__(self, default_message_time_to_live=None, + max_size_in_megabytes=None, requires_duplicate_detection=None, + duplicate_detection_history_time_window=None, + enable_batched_operations=None, size_in_bytes=None): + + self.default_message_time_to_live = default_message_time_to_live + self.max_size_in_megabytes = max_size_in_megabytes + self.requires_duplicate_detection = requires_duplicate_detection + self.duplicate_detection_history_time_window = \ + duplicate_detection_history_time_window + self.enable_batched_operations = enable_batched_operations + self.size_in_bytes = size_in_bytes + + @property + def max_size_in_mega_bytes(self): + import warnings + warnings.warn( + 'This attribute has been changed to max_size_in_megabytes.') + return self.max_size_in_megabytes + + @max_size_in_mega_bytes.setter + def max_size_in_mega_bytes(self, value): + self.max_size_in_megabytes = value + + +class Subscription(WindowsAzureData): + + ''' Subscription class corresponding to Subscription Description: + http://msdn.microsoft.com/en-us/library/windowsazure/hh780763. 
''' + + def __init__(self, lock_duration=None, requires_session=None, + default_message_time_to_live=None, + dead_lettering_on_message_expiration=None, + dead_lettering_on_filter_evaluation_exceptions=None, + enable_batched_operations=None, max_delivery_count=None, + message_count=None): + + self.lock_duration = lock_duration + self.requires_session = requires_session + self.default_message_time_to_live = default_message_time_to_live + self.dead_lettering_on_message_expiration = \ + dead_lettering_on_message_expiration + self.dead_lettering_on_filter_evaluation_exceptions = \ + dead_lettering_on_filter_evaluation_exceptions + self.enable_batched_operations = enable_batched_operations + self.max_delivery_count = max_delivery_count + self.message_count = message_count + + +class Rule(WindowsAzureData): + + ''' Rule class corresponding to Rule Description: + http://msdn.microsoft.com/en-us/library/windowsazure/hh780753. ''' + + def __init__(self, filter_type=None, filter_expression=None, + action_type=None, action_expression=None): + self.filter_type = filter_type + self.filter_expression = filter_expression + self.action_type = action_type + self.action_expression = action_type + + +class Message(WindowsAzureData): + + ''' Message class that used in send message/get mesage apis. ''' + + def __init__(self, body=None, service_bus_service=None, location=None, + custom_properties=None, + type='application/atom+xml;type=entry;charset=utf-8', + broker_properties=None): + self.body = body + self.location = location + self.broker_properties = broker_properties + self.custom_properties = custom_properties + self.type = type + self.service_bus_service = service_bus_service + self._topic_name = None + self._subscription_name = None + self._queue_name = None + + if not service_bus_service: + return + + # if location is set, then extracts the queue name for queue message and + # extracts the topic and subscriptions name if it is topic message. + if location: + if '/subscriptions/' in location: + pos = location.find(service_bus_service.host_base.lower())+1 + pos1 = location.find('/subscriptions/') + self._topic_name = location[pos+len(service_bus_service.host_base):pos1] + pos = pos1 + len('/subscriptions/') + pos1 = location.find('/', pos) + self._subscription_name = location[pos:pos1] + elif '/messages/' in location: + pos = location.find(service_bus_service.host_base.lower())+1 + pos1 = location.find('/messages/') + self._queue_name = location[pos+len(service_bus_service.host_base):pos1] + + def delete(self): + ''' Deletes itself if find queue name or topic name and subscription + name. ''' + if self._queue_name: + self.service_bus_service.delete_queue_message( + self._queue_name, + self.broker_properties['SequenceNumber'], + self.broker_properties['LockToken']) + elif self._topic_name and self._subscription_name: + self.service_bus_service.delete_subscription_message( + self._topic_name, + self._subscription_name, + self.broker_properties['SequenceNumber'], + self.broker_properties['LockToken']) + else: + raise WindowsAzureError(_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_DELETE) + + def unlock(self): + ''' Unlocks itself if find queue name or topic name and subscription + name. 
+
+
+class Message(WindowsAzureData):
+
+    ''' Message class used in the send message/get message APIs. '''
+
+    def __init__(self, body=None, service_bus_service=None, location=None,
+                 custom_properties=None,
+                 type='application/atom+xml;type=entry;charset=utf-8',
+                 broker_properties=None):
+        self.body = body
+        self.location = location
+        self.broker_properties = broker_properties
+        self.custom_properties = custom_properties
+        self.type = type
+        self.service_bus_service = service_bus_service
+        self._topic_name = None
+        self._subscription_name = None
+        self._queue_name = None
+
+        if not service_bus_service:
+            return
+
+        # If location is set, extract the queue name for a queue message, or
+        # the topic and subscription names for a topic message.
+        if location:
+            if '/subscriptions/' in location:
+                pos = location.find(service_bus_service.host_base.lower())+1
+                pos1 = location.find('/subscriptions/')
+                self._topic_name = location[pos+len(service_bus_service.host_base):pos1]
+                pos = pos1 + len('/subscriptions/')
+                pos1 = location.find('/', pos)
+                self._subscription_name = location[pos:pos1]
+            elif '/messages/' in location:
+                pos = location.find(service_bus_service.host_base.lower())+1
+                pos1 = location.find('/messages/')
+                self._queue_name = location[pos+len(service_bus_service.host_base):pos1]
+
+    def delete(self):
+        ''' Deletes the message, using the queue name, or the topic and
+        subscription names, parsed from its location. '''
+        if self._queue_name:
+            self.service_bus_service.delete_queue_message(
+                self._queue_name,
+                self.broker_properties['SequenceNumber'],
+                self.broker_properties['LockToken'])
+        elif self._topic_name and self._subscription_name:
+            self.service_bus_service.delete_subscription_message(
+                self._topic_name,
+                self._subscription_name,
+                self.broker_properties['SequenceNumber'],
+                self.broker_properties['LockToken'])
+        else:
+            raise WindowsAzureError(_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_DELETE)
+
+    def unlock(self):
+        ''' Unlocks the message, using the queue name, or the topic and
+        subscription names, parsed from its location. '''
+        if self._queue_name:
+            self.service_bus_service.unlock_queue_message(
+                self._queue_name,
+                self.broker_properties['SequenceNumber'],
+                self.broker_properties['LockToken'])
+        elif self._topic_name and self._subscription_name:
+            self.service_bus_service.unlock_subscription_message(
+                self._topic_name,
+                self._subscription_name,
+                self.broker_properties['SequenceNumber'],
+                self.broker_properties['LockToken'])
+        else:
+            raise WindowsAzureError(_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_UNLOCK)
+
+    def add_headers(self, request):
+        ''' Adds additional headers to the request for a message request. '''
+
+        # Adds custom properties
+        if self.custom_properties:
+            for name, value in self.custom_properties.items():
+                if sys.version_info < (3,) and isinstance(value, unicode):
+                    request.headers.append(
+                        (name, '"' + value.encode('utf-8') + '"'))
+                elif isinstance(value, str):
+                    request.headers.append((name, '"' + str(value) + '"'))
+                elif isinstance(value, datetime):
+                    request.headers.append(
+                        (name, '"' + value.strftime('%a, %d %b %Y %H:%M:%S GMT') + '"'))
+                else:
+                    request.headers.append((name, str(value).lower()))
+
+        # Adds content-type
+        request.headers.append(('Content-Type', self.type))
+
+        # Adds BrokerProperties
+        if self.broker_properties:
+            request.headers.append(
+                ('BrokerProperties', str(self.broker_properties)))
+
+        return request.headers
+
+
+def _create_message(response, service_instance):
+    ''' Creates a message from the response.
+
+    response: response from the service bus server.
+    service_instance: the service bus client.
+    '''
+    respbody = response.body
+    custom_properties = {}
+    broker_properties = None
+    message_type = None
+    message_location = None
+
+    # Gets all information from the response headers.
+    for name, value in response.headers:
+        if name.lower() == 'brokerproperties':
+            broker_properties = json.loads(value)
+        elif name.lower() == 'content-type':
+            message_type = value
+        elif name.lower() == 'location':
+            message_location = value
+        elif name.lower() not in ['content-type',
+                                  'brokerproperties',
+                                  'transfer-encoding',
+                                  'server',
+                                  'location',
+                                  'date']:
+            if '"' in value:
+                value = value[1:-1]
+                try:
+                    custom_properties[name] = datetime.strptime(
+                        value, '%a, %d %b %Y %H:%M:%S GMT')
+                except ValueError:
+                    custom_properties[name] = value
+            else:  # only int, float or boolean
+                if value.lower() == 'true':
+                    custom_properties[name] = True
+                elif value.lower() == 'false':
+                    custom_properties[name] = False
+                # int('3.1') doesn't work so need to get float('3.14') first
+                elif str(int(float(value))) == value:
+                    custom_properties[name] = int(value)
+                else:
+                    custom_properties[name] = float(value)
+
+    if message_type is None:
+        message = Message(
+            respbody, service_instance, message_location, custom_properties,
+            'application/atom+xml;type=entry;charset=utf-8', broker_properties)
+    else:
+        message = Message(respbody, service_instance, message_location,
+                          custom_properties, message_type, broker_properties)
+    return message
+
+# convert functions
+
+
+def _convert_response_to_rule(response):
+    return _convert_xml_to_rule(response.body)
+
+
+def _convert_xml_to_rule(xmlstr):
+    ''' Converts response xml to rule object.
+ + The format of xml for rule: +<entry xmlns='http://www.w3.org/2005/Atom'> +<content type='application/xml'> +<RuleDescription + xmlns:i="http://www.w3.org/2001/XMLSchema-instance" + xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect"> + <Filter i:type="SqlFilterExpression"> + <SqlExpression>MyProperty='XYZ'</SqlExpression> + </Filter> + <Action i:type="SqlFilterAction"> + <SqlExpression>set MyProperty2 = 'ABC'</SqlExpression> + </Action> +</RuleDescription> +</content> +</entry> + ''' + xmldoc = minidom.parseString(xmlstr) + rule = Rule() + + for rule_desc in _get_children_from_path(xmldoc, + 'entry', + 'content', + 'RuleDescription'): + for xml_filter in _get_child_nodes(rule_desc, 'Filter'): + filter_type = xml_filter.getAttributeNS( + XML_SCHEMA_NAMESPACE, 'type') + setattr(rule, 'filter_type', str(filter_type)) + if xml_filter.childNodes: + + for expr in _get_child_nodes(xml_filter, 'SqlExpression'): + setattr(rule, 'filter_expression', + expr.firstChild.nodeValue) + + for xml_action in _get_child_nodes(rule_desc, 'Action'): + action_type = xml_action.getAttributeNS( + XML_SCHEMA_NAMESPACE, 'type') + setattr(rule, 'action_type', str(action_type)) + if xml_action.childNodes: + action_expression = xml_action.childNodes[0].firstChild + if action_expression: + setattr(rule, 'action_expression', + action_expression.nodeValue) + + # extract id, updated and name value from feed entry and set them of rule. + for name, value in _get_entry_properties(xmlstr, True, '/rules').items(): + setattr(rule, name, value) + + return rule + + +def _convert_response_to_queue(response): + return _convert_xml_to_queue(response.body) + + +def _parse_bool(value): + if value.lower() == 'true': + return True + return False + + +def _convert_xml_to_queue(xmlstr): + ''' Converts xml response to queue object. + + The format of xml response for queue: +<QueueDescription + xmlns=\"http://schemas.microsoft.com/netservices/2010/10/servicebus/connect\"> + <MaxSizeInBytes>10000</MaxSizeInBytes> + <DefaultMessageTimeToLive>PT5M</DefaultMessageTimeToLive> + <LockDuration>PT2M</LockDuration> + <RequiresGroupedReceives>False</RequiresGroupedReceives> + <SupportsDuplicateDetection>False</SupportsDuplicateDetection> + ... +</QueueDescription> + + ''' + xmldoc = minidom.parseString(xmlstr) + queue = Queue() + + invalid_queue = True + # get node for each attribute in Queue class, if nothing found then the + # response is not valid xml for Queue. 
+ for desc in _get_children_from_path(xmldoc, + 'entry', + 'content', + 'QueueDescription'): + node_value = _get_first_child_node_value(desc, 'LockDuration') + if node_value is not None: + queue.lock_duration = node_value + invalid_queue = False + + node_value = _get_first_child_node_value(desc, 'MaxSizeInMegabytes') + if node_value is not None: + queue.max_size_in_megabytes = int(node_value) + invalid_queue = False + + node_value = _get_first_child_node_value( + desc, 'RequiresDuplicateDetection') + if node_value is not None: + queue.requires_duplicate_detection = _parse_bool(node_value) + invalid_queue = False + + node_value = _get_first_child_node_value(desc, 'RequiresSession') + if node_value is not None: + queue.requires_session = _parse_bool(node_value) + invalid_queue = False + + node_value = _get_first_child_node_value( + desc, 'DefaultMessageTimeToLive') + if node_value is not None: + queue.default_message_time_to_live = node_value + invalid_queue = False + + node_value = _get_first_child_node_value( + desc, 'DeadLetteringOnMessageExpiration') + if node_value is not None: + queue.dead_lettering_on_message_expiration = _parse_bool(node_value) + invalid_queue = False + + node_value = _get_first_child_node_value( + desc, 'DuplicateDetectionHistoryTimeWindow') + if node_value is not None: + queue.duplicate_detection_history_time_window = node_value + invalid_queue = False + + node_value = _get_first_child_node_value( + desc, 'EnableBatchedOperations') + if node_value is not None: + queue.enable_batched_operations = _parse_bool(node_value) + invalid_queue = False + + node_value = _get_first_child_node_value(desc, 'MaxDeliveryCount') + if node_value is not None: + queue.max_delivery_count = int(node_value) + invalid_queue = False + + node_value = _get_first_child_node_value(desc, 'MessageCount') + if node_value is not None: + queue.message_count = int(node_value) + invalid_queue = False + + node_value = _get_first_child_node_value(desc, 'SizeInBytes') + if node_value is not None: + queue.size_in_bytes = int(node_value) + invalid_queue = False + + if invalid_queue: + raise WindowsAzureError(_ERROR_QUEUE_NOT_FOUND) + + # extract id, updated and name value from feed entry and set them of queue. + for name, value in _get_entry_properties(xmlstr, True).items(): + setattr(queue, name, value) + + return queue + + +def _convert_response_to_topic(response): + return _convert_xml_to_topic(response.body) + + +def _convert_xml_to_topic(xmlstr): + '''Converts xml response to topic + + The xml format for topic: +<entry xmlns='http://www.w3.org/2005/Atom'> + <content type='application/xml'> + <TopicDescription + xmlns:i="http://www.w3.org/2001/XMLSchema-instance" + xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect"> + <DefaultMessageTimeToLive>P10675199DT2H48M5.4775807S</DefaultMessageTimeToLive> + <MaxSizeInMegabytes>1024</MaxSizeInMegabytes> + <RequiresDuplicateDetection>false</RequiresDuplicateDetection> + <DuplicateDetectionHistoryTimeWindow>P7D</DuplicateDetectionHistoryTimeWindow> + <DeadLetteringOnFilterEvaluationExceptions>true</DeadLetteringOnFilterEvaluationExceptions> + </TopicDescription> + </content> +</entry> + ''' + xmldoc = minidom.parseString(xmlstr) + topic = Topic() + + invalid_topic = True + + # get node for each attribute in Topic class, if nothing found then the + # response is not valid xml for Topic. 
+ for desc in _get_children_from_path(xmldoc, + 'entry', + 'content', + 'TopicDescription'): + invalid_topic = True + node_value = _get_first_child_node_value( + desc, 'DefaultMessageTimeToLive') + if node_value is not None: + topic.default_message_time_to_live = node_value + invalid_topic = False + node_value = _get_first_child_node_value(desc, 'MaxSizeInMegabytes') + if node_value is not None: + topic.max_size_in_megabytes = int(node_value) + invalid_topic = False + node_value = _get_first_child_node_value( + desc, 'RequiresDuplicateDetection') + if node_value is not None: + topic.requires_duplicate_detection = _parse_bool(node_value) + invalid_topic = False + node_value = _get_first_child_node_value( + desc, 'DuplicateDetectionHistoryTimeWindow') + if node_value is not None: + topic.duplicate_detection_history_time_window = node_value + invalid_topic = False + node_value = _get_first_child_node_value( + desc, 'EnableBatchedOperations') + if node_value is not None: + topic.enable_batched_operations = _parse_bool(node_value) + invalid_topic = False + node_value = _get_first_child_node_value(desc, 'SizeInBytes') + if node_value is not None: + topic.size_in_bytes = int(node_value) + invalid_topic = False + + if invalid_topic: + raise WindowsAzureError(_ERROR_TOPIC_NOT_FOUND) + + # extract id, updated and name value from feed entry and set them of topic. + for name, value in _get_entry_properties(xmlstr, True).items(): + setattr(topic, name, value) + return topic + + +def _convert_response_to_subscription(response): + return _convert_xml_to_subscription(response.body) + + +def _convert_xml_to_subscription(xmlstr): + '''Converts xml response to subscription + + The xml format for subscription: +<entry xmlns='http://www.w3.org/2005/Atom'> + <content type='application/xml'> + <SubscriptionDescription + xmlns:i="http://www.w3.org/2001/XMLSchema-instance" + xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect"> + <LockDuration>PT5M</LockDuration> + <RequiresSession>false</RequiresSession> + <DefaultMessageTimeToLive>P10675199DT2H48M5.4775807S</DefaultMessageTimeToLive> + <DeadLetteringOnMessageExpiration>false</DeadLetteringOnMessageExpiration> + <DeadLetteringOnFilterEvaluationExceptions>true</DeadLetteringOnFilterEvaluationExceptions> + </SubscriptionDescription> + </content> +</entry> + ''' + xmldoc = minidom.parseString(xmlstr) + subscription = Subscription() + + for desc in _get_children_from_path(xmldoc, + 'entry', + 'content', + 'SubscriptionDescription'): + node_value = _get_first_child_node_value(desc, 'LockDuration') + if node_value is not None: + subscription.lock_duration = node_value + + node_value = _get_first_child_node_value( + desc, 'RequiresSession') + if node_value is not None: + subscription.requires_session = _parse_bool(node_value) + + node_value = _get_first_child_node_value( + desc, 'DefaultMessageTimeToLive') + if node_value is not None: + subscription.default_message_time_to_live = node_value + + node_value = _get_first_child_node_value( + desc, 'DeadLetteringOnFilterEvaluationExceptions') + if node_value is not None: + subscription.dead_lettering_on_filter_evaluation_exceptions = \ + _parse_bool(node_value) + + node_value = _get_first_child_node_value( + desc, 'DeadLetteringOnMessageExpiration') + if node_value is not None: + subscription.dead_lettering_on_message_expiration = \ + _parse_bool(node_value) + + node_value = _get_first_child_node_value( + desc, 'EnableBatchedOperations') + if node_value is not None: + 
subscription.enable_batched_operations = _parse_bool(node_value)
+
+        node_value = _get_first_child_node_value(
+            desc, 'MaxDeliveryCount')
+        if node_value is not None:
+            subscription.max_delivery_count = int(node_value)
+
+        node_value = _get_first_child_node_value(
+            desc, 'MessageCount')
+        if node_value is not None:
+            subscription.message_count = int(node_value)
+
+    for name, value in _get_entry_properties(xmlstr,
+                                             True,
+                                             '/subscriptions').items():
+        setattr(subscription, name, value)
+
+    return subscription
+
+
+def _convert_subscription_to_xml(subscription):
+    '''
+    Converts a subscription object to xml to send. The order of each field of
+    subscription in xml is very important so we can't simply call
+    convert_class_to_xml.
+
+    subscription: the subscription object to be converted.
+    '''
+
+    subscription_body = '<SubscriptionDescription xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">'
+    if subscription:
+        if subscription.lock_duration is not None:
+            subscription_body += ''.join(
+                ['<LockDuration>',
+                 str(subscription.lock_duration),
+                 '</LockDuration>'])
+
+        if subscription.requires_session is not None:
+            subscription_body += ''.join(
+                ['<RequiresSession>',
+                 str(subscription.requires_session).lower(),
+                 '</RequiresSession>'])
+
+        if subscription.default_message_time_to_live is not None:
+            subscription_body += ''.join(
+                ['<DefaultMessageTimeToLive>',
+                 str(subscription.default_message_time_to_live),
+                 '</DefaultMessageTimeToLive>'])
+
+        if subscription.dead_lettering_on_message_expiration is not None:
+            subscription_body += ''.join(
+                ['<DeadLetteringOnMessageExpiration>',
+                 str(subscription.dead_lettering_on_message_expiration).lower(),
+                 '</DeadLetteringOnMessageExpiration>'])
+
+        if subscription.dead_lettering_on_filter_evaluation_exceptions is not None:
+            subscription_body += ''.join(
+                ['<DeadLetteringOnFilterEvaluationExceptions>',
+                 str(subscription.dead_lettering_on_filter_evaluation_exceptions).lower(),
+                 '</DeadLetteringOnFilterEvaluationExceptions>'])
+
+        if subscription.enable_batched_operations is not None:
+            subscription_body += ''.join(
+                ['<EnableBatchedOperations>',
+                 str(subscription.enable_batched_operations).lower(),
+                 '</EnableBatchedOperations>'])
+
+        if subscription.max_delivery_count is not None:
+            subscription_body += ''.join(
+                ['<MaxDeliveryCount>',
+                 str(subscription.max_delivery_count),
+                 '</MaxDeliveryCount>'])
+
+        if subscription.message_count is not None:
+            subscription_body += ''.join(
+                ['<MessageCount>',
+                 str(subscription.message_count),
+                 '</MessageCount>'])
+
+    subscription_body += '</SubscriptionDescription>'
+    return _create_entry(subscription_body)
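As a usage sketch (names illustrative; 'sbs' is assumed to be a configured ServiceBusService), a Subscription built from these fields is serialized in exactly the element order above when passed to create_subscription:

    # Hypothetical example; durations are ISO 8601 strings such as 'PT1M'.
    from azure.servicebus import Subscription

    subscription = Subscription(lock_duration='PT1M',
                                dead_lettering_on_message_expiration=True,
                                max_delivery_count=10)
    sbs.create_subscription('mytopic', 'mysubscription', subscription)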
+
+
+def _convert_rule_to_xml(rule):
+    '''
+    Converts a rule object to xml to send. The order of each field of rule
+    in xml is very important so we can't simply call convert_class_to_xml.
+
+    rule: the rule object to be converted.
+    '''
+    rule_body = '<RuleDescription xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">'
+    if rule:
+        if rule.filter_type:
+            rule_body += ''.join(
+                ['<Filter i:type="',
+                 xml_escape(rule.filter_type),
+                 '">'])
+            if rule.filter_type == 'CorrelationFilter':
+                rule_body += ''.join(
+                    ['<CorrelationId>',
+                     xml_escape(rule.filter_expression),
+                     '</CorrelationId>'])
+            else:
+                rule_body += ''.join(
+                    ['<SqlExpression>',
+                     xml_escape(rule.filter_expression),
+                     '</SqlExpression>'])
+                rule_body += '<CompatibilityLevel>20</CompatibilityLevel>'
+            rule_body += '</Filter>'
+        if rule.action_type:
+            rule_body += ''.join(
+                ['<Action i:type="',
+                 xml_escape(rule.action_type),
+                 '">'])
+            if rule.action_type == 'SqlRuleAction':
+                rule_body += ''.join(
+                    ['<SqlExpression>',
+                     xml_escape(rule.action_expression),
+                     '</SqlExpression>'])
+                rule_body += '<CompatibilityLevel>20</CompatibilityLevel>'
+            rule_body += '</Action>'
+    rule_body += '</RuleDescription>'
+
+    return _create_entry(rule_body)
+
+
+def _convert_topic_to_xml(topic):
+    '''
+    Converts a topic object to xml to send. The order of each field of topic
+    in xml is very important so we can't simply call convert_class_to_xml.
+
+    topic: the topic object to be converted.
+    '''
+
+    topic_body = '<TopicDescription xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">'
+    if topic:
+        if topic.default_message_time_to_live is not None:
+            topic_body += ''.join(
+                ['<DefaultMessageTimeToLive>',
+                 str(topic.default_message_time_to_live),
+                 '</DefaultMessageTimeToLive>'])
+
+        if topic.max_size_in_megabytes is not None:
+            topic_body += ''.join(
+                ['<MaxSizeInMegabytes>',
+                 str(topic.max_size_in_megabytes),
+                 '</MaxSizeInMegabytes>'])
+
+        if topic.requires_duplicate_detection is not None:
+            topic_body += ''.join(
+                ['<RequiresDuplicateDetection>',
+                 str(topic.requires_duplicate_detection).lower(),
+                 '</RequiresDuplicateDetection>'])
+
+        if topic.duplicate_detection_history_time_window is not None:
+            topic_body += ''.join(
+                ['<DuplicateDetectionHistoryTimeWindow>',
+                 str(topic.duplicate_detection_history_time_window),
+                 '</DuplicateDetectionHistoryTimeWindow>'])
+
+        if topic.enable_batched_operations is not None:
+            topic_body += ''.join(
+                ['<EnableBatchedOperations>',
+                 str(topic.enable_batched_operations).lower(),
+                 '</EnableBatchedOperations>'])
+
+        if topic.size_in_bytes is not None:
+            topic_body += ''.join(
+                ['<SizeInBytes>',
+                 str(topic.size_in_bytes),
+                 '</SizeInBytes>'])
+
+    topic_body += '</TopicDescription>'
+
+    return _create_entry(topic_body)
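The topic serializer above follows the same pattern; a corresponding sketch (again illustrative, with an assumed 'sbs' client):

    # Hypothetical example; booleans are lower-cased and durations are
    # ISO 8601 strings, matching the serializer output.
    from azure.servicebus import Topic

    topic = Topic(max_size_in_megabytes=1024,
                  requires_duplicate_detection=False,
                  duplicate_detection_history_time_window='P7D')
    sbs.create_topic('mytopic', topic)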
+
+
+def _convert_queue_to_xml(queue):
+    '''
+    Converts a queue object to xml to send. The order of each field of queue
+    in xml is very important so we can't simply call convert_class_to_xml.
+
+    queue: the queue object to be converted.
+    '''
+    queue_body = '<QueueDescription xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">'
+    if queue:
+        if queue.lock_duration is not None:
+            queue_body += ''.join(
+                ['<LockDuration>',
+                 str(queue.lock_duration),
+                 '</LockDuration>'])
+
+        if queue.max_size_in_megabytes is not None:
+            queue_body += ''.join(
+                ['<MaxSizeInMegabytes>',
+                 str(queue.max_size_in_megabytes),
+                 '</MaxSizeInMegabytes>'])
+
+        if queue.requires_duplicate_detection is not None:
+            queue_body += ''.join(
+                ['<RequiresDuplicateDetection>',
+                 str(queue.requires_duplicate_detection).lower(),
+                 '</RequiresDuplicateDetection>'])
+
+        if queue.requires_session is not None:
+            queue_body += ''.join(
+                ['<RequiresSession>',
+                 str(queue.requires_session).lower(),
+                 '</RequiresSession>'])
+
+        if queue.default_message_time_to_live is not None:
+            queue_body += ''.join(
+                ['<DefaultMessageTimeToLive>',
+                 str(queue.default_message_time_to_live),
+                 '</DefaultMessageTimeToLive>'])
+
+        if queue.dead_lettering_on_message_expiration is not None:
+            queue_body += ''.join(
+                ['<DeadLetteringOnMessageExpiration>',
+                 str(queue.dead_lettering_on_message_expiration).lower(),
+                 '</DeadLetteringOnMessageExpiration>'])
+
+        if queue.duplicate_detection_history_time_window is not None:
+            queue_body += ''.join(
+                ['<DuplicateDetectionHistoryTimeWindow>',
+                 str(queue.duplicate_detection_history_time_window),
+                 '</DuplicateDetectionHistoryTimeWindow>'])
+
+        if queue.max_delivery_count is not None:
+            queue_body += ''.join(
+                ['<MaxDeliveryCount>',
+                 str(queue.max_delivery_count),
+                 '</MaxDeliveryCount>'])
+
+        if queue.enable_batched_operations is not None:
+            queue_body += ''.join(
+                ['<EnableBatchedOperations>',
+                 str(queue.enable_batched_operations).lower(),
+                 '</EnableBatchedOperations>'])
+
+        if queue.size_in_bytes is not None:
+            queue_body += ''.join(
+                ['<SizeInBytes>',
+                 str(queue.size_in_bytes),
+                 '</SizeInBytes>'])
+
+        if queue.message_count is not None:
+            queue_body += ''.join(
+                ['<MessageCount>',
+                 str(queue.message_count),
+                 '</MessageCount>'])
+
+    queue_body += '</QueueDescription>'
+    return _create_entry(queue_body)
+
+
+def _service_bus_error_handler(http_error):
+    ''' Simple error handler for service bus service. '''
+    return _general_error_handler(http_error)
+
+from azure.servicebus.servicebusservice import ServiceBusService
diff --git a/awx/lib/site-packages/azure/servicebus/servicebusservice.py b/awx/lib/site-packages/azure/servicebus/servicebusservice.py
index 894f018ba2..dcd5fd6ed3 100644
--- a/awx/lib/site-packages/azure/servicebus/servicebusservice.py
+++ b/awx/lib/site-packages/azure/servicebus/servicebusservice.py
@@ -1,914 +1,1011 @@
-#-------------------------------------------------------------------------
-# Copyright (c) Microsoft. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#-------------------------------------------------------------------------- -import os -import time - -from azure import ( - WindowsAzureError, - SERVICE_BUS_HOST_BASE, - _convert_response_to_feeds, - _dont_fail_not_exist, - _dont_fail_on_exist, - _get_request_body, - _get_request_body_bytes_only, - _int_or_none, - _str, - _update_request_uri_query, - url_quote, - url_unquote, - _validate_not_none, - ) -from azure.http import ( - HTTPError, - HTTPRequest, - ) -from azure.http.httpclient import _HTTPClient -from azure.servicebus import ( - AZURE_SERVICEBUS_NAMESPACE, - AZURE_SERVICEBUS_ACCESS_KEY, - AZURE_SERVICEBUS_ISSUER, - _convert_topic_to_xml, - _convert_response_to_topic, - _convert_queue_to_xml, - _convert_response_to_queue, - _convert_subscription_to_xml, - _convert_response_to_subscription, - _convert_rule_to_xml, - _convert_response_to_rule, - _convert_xml_to_queue, - _convert_xml_to_topic, - _convert_xml_to_subscription, - _convert_xml_to_rule, - _create_message, - _service_bus_error_handler, - ) - -# Token cache for Authentication -# Shared by the different instances of ServiceBusService -_tokens = {} - - -class ServiceBusService(object): - - def __init__(self, service_namespace=None, account_key=None, issuer=None, - x_ms_version='2011-06-01', host_base=SERVICE_BUS_HOST_BASE): - # x_ms_version is not used, but the parameter is kept for backwards - # compatibility - self.requestid = None - self.service_namespace = service_namespace - self.account_key = account_key - self.issuer = issuer - self.host_base = host_base - - # Get service namespace, account key and issuer. - # If they are set when constructing, then use them, else find them - # from environment variables. - if not self.service_namespace: - self.service_namespace = os.environ.get(AZURE_SERVICEBUS_NAMESPACE) - if not self.account_key: - self.account_key = os.environ.get(AZURE_SERVICEBUS_ACCESS_KEY) - if not self.issuer: - self.issuer = os.environ.get(AZURE_SERVICEBUS_ISSUER) - - if not self.service_namespace or \ - not self.account_key or not self.issuer: - raise WindowsAzureError( - 'You need to provide servicebus namespace, access key and Issuer') - - self._httpclient = _HTTPClient(service_instance=self, - service_namespace=self.service_namespace, - account_key=self.account_key, - issuer=self.issuer) - self._filter = self._httpclient.perform_request - - def with_filter(self, filter): - ''' - Returns a new service which will process requests with the specified - filter. Filtering operations can include logging, automatic retrying, - etc... The filter is a lambda which receives the HTTPRequest and - another lambda. The filter can perform any pre-processing on the - request, pass it off to the next lambda, and then perform any - post-processing on the response. - ''' - res = ServiceBusService(self.service_namespace, self.account_key, - self.issuer) - old_filter = self._filter - - def new_filter(request): - return filter(request, old_filter) - - res._filter = new_filter - return res - - def set_proxy(self, host, port, user=None, password=None): - ''' - Sets the proxy server host and port for the HTTP CONNECT Tunnelling. - - host: Address of the proxy. Ex: '192.168.0.100' - port: Port of the proxy. Ex: 6000 - user: User for proxy authorization. - password: Password for proxy authorization. - ''' - self._httpclient.set_proxy(host, port, user, password) - - def create_queue(self, queue_name, queue=None, fail_on_exist=False): - ''' - Creates a new queue. Once created, this queue's resource manifest is - immutable. 
- - queue_name: Name of the queue to create. - queue: Queue object to create. - fail_on_exist: - Specify whether to throw an exception when the queue exists. - ''' - _validate_not_none('queue_name', queue_name) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + _str(queue_name) + '' - request.body = _get_request_body(_convert_queue_to_xml(queue)) - request.path, request.query = _update_request_uri_query(request) - request.headers = self._update_service_bus_header(request) - if not fail_on_exist: - try: - self._perform_request(request) - return True - except WindowsAzureError as ex: - _dont_fail_on_exist(ex) - return False - else: - self._perform_request(request) - return True - - def delete_queue(self, queue_name, fail_not_exist=False): - ''' - Deletes an existing queue. This operation will also remove all - associated state including messages in the queue. - - queue_name: Name of the queue to delete. - fail_not_exist: - Specify whether to throw an exception if the queue doesn't exist. - ''' - _validate_not_none('queue_name', queue_name) - request = HTTPRequest() - request.method = 'DELETE' - request.host = self._get_host() - request.path = '/' + _str(queue_name) + '' - request.path, request.query = _update_request_uri_query(request) - request.headers = self._update_service_bus_header(request) - if not fail_not_exist: - try: - self._perform_request(request) - return True - except WindowsAzureError as ex: - _dont_fail_not_exist(ex) - return False - else: - self._perform_request(request) - return True - - def get_queue(self, queue_name): - ''' - Retrieves an existing queue. - - queue_name: Name of the queue. - ''' - _validate_not_none('queue_name', queue_name) - request = HTTPRequest() - request.method = 'GET' - request.host = self._get_host() - request.path = '/' + _str(queue_name) + '' - request.path, request.query = _update_request_uri_query(request) - request.headers = self._update_service_bus_header(request) - response = self._perform_request(request) - - return _convert_response_to_queue(response) - - def list_queues(self): - ''' - Enumerates the queues in the service namespace. - ''' - request = HTTPRequest() - request.method = 'GET' - request.host = self._get_host() - request.path = '/$Resources/Queues' - request.path, request.query = _update_request_uri_query(request) - request.headers = self._update_service_bus_header(request) - response = self._perform_request(request) - - return _convert_response_to_feeds(response, _convert_xml_to_queue) - - def create_topic(self, topic_name, topic=None, fail_on_exist=False): - ''' - Creates a new topic. Once created, this topic resource manifest is - immutable. - - topic_name: Name of the topic to create. - topic: Topic object to create. - fail_on_exist: - Specify whether to throw an exception when the topic exists. - ''' - _validate_not_none('topic_name', topic_name) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + _str(topic_name) + '' - request.body = _get_request_body(_convert_topic_to_xml(topic)) - request.path, request.query = _update_request_uri_query(request) - request.headers = self._update_service_bus_header(request) - if not fail_on_exist: - try: - self._perform_request(request) - return True - except WindowsAzureError as ex: - _dont_fail_on_exist(ex) - return False - else: - self._perform_request(request) - return True - - def delete_topic(self, topic_name, fail_not_exist=False): - ''' - Deletes an existing topic. 
This operation will also remove all - associated state including associated subscriptions. - - topic_name: Name of the topic to delete. - fail_not_exist: - Specify whether throw exception when topic doesn't exist. - ''' - _validate_not_none('topic_name', topic_name) - request = HTTPRequest() - request.method = 'DELETE' - request.host = self._get_host() - request.path = '/' + _str(topic_name) + '' - request.path, request.query = _update_request_uri_query(request) - request.headers = self._update_service_bus_header(request) - if not fail_not_exist: - try: - self._perform_request(request) - return True - except WindowsAzureError as ex: - _dont_fail_not_exist(ex) - return False - else: - self._perform_request(request) - return True - - def get_topic(self, topic_name): - ''' - Retrieves the description for the specified topic. - - topic_name: Name of the topic. - ''' - _validate_not_none('topic_name', topic_name) - request = HTTPRequest() - request.method = 'GET' - request.host = self._get_host() - request.path = '/' + _str(topic_name) + '' - request.path, request.query = _update_request_uri_query(request) - request.headers = self._update_service_bus_header(request) - response = self._perform_request(request) - - return _convert_response_to_topic(response) - - def list_topics(self): - ''' - Retrieves the topics in the service namespace. - ''' - request = HTTPRequest() - request.method = 'GET' - request.host = self._get_host() - request.path = '/$Resources/Topics' - request.path, request.query = _update_request_uri_query(request) - request.headers = self._update_service_bus_header(request) - response = self._perform_request(request) - - return _convert_response_to_feeds(response, _convert_xml_to_topic) - - def create_rule(self, topic_name, subscription_name, rule_name, rule=None, - fail_on_exist=False): - ''' - Creates a new rule. Once created, this rule's resource manifest is - immutable. - - topic_name: Name of the topic. - subscription_name: Name of the subscription. - rule_name: Name of the rule. - fail_on_exist: - Specify whether to throw an exception when the rule exists. - ''' - _validate_not_none('topic_name', topic_name) - _validate_not_none('subscription_name', subscription_name) - _validate_not_none('rule_name', rule_name) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + _str(topic_name) + '/subscriptions/' + \ - _str(subscription_name) + \ - '/rules/' + _str(rule_name) + '' - request.body = _get_request_body(_convert_rule_to_xml(rule)) - request.path, request.query = _update_request_uri_query(request) - request.headers = self._update_service_bus_header(request) - if not fail_on_exist: - try: - self._perform_request(request) - return True - except WindowsAzureError as ex: - _dont_fail_on_exist(ex) - return False - else: - self._perform_request(request) - return True - - def delete_rule(self, topic_name, subscription_name, rule_name, - fail_not_exist=False): - ''' - Deletes an existing rule. - - topic_name: Name of the topic. - subscription_name: Name of the subscription. - rule_name: - Name of the rule to delete. DEFAULT_RULE_NAME=$Default. - Use DEFAULT_RULE_NAME to delete default rule for the subscription. - fail_not_exist: - Specify whether throw exception when rule doesn't exist. 
- ''' - _validate_not_none('topic_name', topic_name) - _validate_not_none('subscription_name', subscription_name) - _validate_not_none('rule_name', rule_name) - request = HTTPRequest() - request.method = 'DELETE' - request.host = self._get_host() - request.path = '/' + _str(topic_name) + '/subscriptions/' + \ - _str(subscription_name) + \ - '/rules/' + _str(rule_name) + '' - request.path, request.query = _update_request_uri_query(request) - request.headers = self._update_service_bus_header(request) - if not fail_not_exist: - try: - self._perform_request(request) - return True - except WindowsAzureError as ex: - _dont_fail_not_exist(ex) - return False - else: - self._perform_request(request) - return True - - def get_rule(self, topic_name, subscription_name, rule_name): - ''' - Retrieves the description for the specified rule. - - topic_name: Name of the topic. - subscription_name: Name of the subscription. - rule_name: Name of the rule. - ''' - _validate_not_none('topic_name', topic_name) - _validate_not_none('subscription_name', subscription_name) - _validate_not_none('rule_name', rule_name) - request = HTTPRequest() - request.method = 'GET' - request.host = self._get_host() - request.path = '/' + _str(topic_name) + '/subscriptions/' + \ - _str(subscription_name) + \ - '/rules/' + _str(rule_name) + '' - request.path, request.query = _update_request_uri_query(request) - request.headers = self._update_service_bus_header(request) - response = self._perform_request(request) - - return _convert_response_to_rule(response) - - def list_rules(self, topic_name, subscription_name): - ''' - Retrieves the rules that exist under the specified subscription. - - topic_name: Name of the topic. - subscription_name: Name of the subscription. - ''' - _validate_not_none('topic_name', topic_name) - _validate_not_none('subscription_name', subscription_name) - request = HTTPRequest() - request.method = 'GET' - request.host = self._get_host() - request.path = '/' + \ - _str(topic_name) + '/subscriptions/' + \ - _str(subscription_name) + '/rules/' - request.path, request.query = _update_request_uri_query(request) - request.headers = self._update_service_bus_header(request) - response = self._perform_request(request) - - return _convert_response_to_feeds(response, _convert_xml_to_rule) - - def create_subscription(self, topic_name, subscription_name, - subscription=None, fail_on_exist=False): - ''' - Creates a new subscription. Once created, this subscription resource - manifest is immutable. - - topic_name: Name of the topic. - subscription_name: Name of the subscription. - fail_on_exist: - Specify whether throw exception when subscription exists. - ''' - _validate_not_none('topic_name', topic_name) - _validate_not_none('subscription_name', subscription_name) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + \ - _str(topic_name) + '/subscriptions/' + _str(subscription_name) + '' - request.body = _get_request_body( - _convert_subscription_to_xml(subscription)) - request.path, request.query = _update_request_uri_query(request) - request.headers = self._update_service_bus_header(request) - if not fail_on_exist: - try: - self._perform_request(request) - return True - except WindowsAzureError as ex: - _dont_fail_on_exist(ex) - return False - else: - self._perform_request(request) - return True - - def delete_subscription(self, topic_name, subscription_name, - fail_not_exist=False): - ''' - Deletes an existing subscription. - - topic_name: Name of the topic. 
- subscription_name: Name of the subscription to delete. - fail_not_exist: - Specify whether to throw an exception when the subscription - doesn't exist. - ''' - _validate_not_none('topic_name', topic_name) - _validate_not_none('subscription_name', subscription_name) - request = HTTPRequest() - request.method = 'DELETE' - request.host = self._get_host() - request.path = '/' + \ - _str(topic_name) + '/subscriptions/' + _str(subscription_name) + '' - request.path, request.query = _update_request_uri_query(request) - request.headers = self._update_service_bus_header(request) - if not fail_not_exist: - try: - self._perform_request(request) - return True - except WindowsAzureError as ex: - _dont_fail_not_exist(ex) - return False - else: - self._perform_request(request) - return True - - def get_subscription(self, topic_name, subscription_name): - ''' - Gets an existing subscription. - - topic_name: Name of the topic. - subscription_name: Name of the subscription. - ''' - _validate_not_none('topic_name', topic_name) - _validate_not_none('subscription_name', subscription_name) - request = HTTPRequest() - request.method = 'GET' - request.host = self._get_host() - request.path = '/' + \ - _str(topic_name) + '/subscriptions/' + _str(subscription_name) + '' - request.path, request.query = _update_request_uri_query(request) - request.headers = self._update_service_bus_header(request) - response = self._perform_request(request) - - return _convert_response_to_subscription(response) - - def list_subscriptions(self, topic_name): - ''' - Retrieves the subscriptions in the specified topic. - - topic_name: Name of the topic. - ''' - _validate_not_none('topic_name', topic_name) - request = HTTPRequest() - request.method = 'GET' - request.host = self._get_host() - request.path = '/' + _str(topic_name) + '/subscriptions/' - request.path, request.query = _update_request_uri_query(request) - request.headers = self._update_service_bus_header(request) - response = self._perform_request(request) - - return _convert_response_to_feeds(response, - _convert_xml_to_subscription) - - def send_topic_message(self, topic_name, message=None): - ''' - Enqueues a message into the specified topic. The limit to the number - of messages which may be present in the topic is governed by the - message size in MaxTopicSizeInBytes. If this message causes the topic - to exceed its quota, a quota exceeded error is returned and the - message will be rejected. - - topic_name: Name of the topic. - message: Message object containing message body and properties. - ''' - _validate_not_none('topic_name', topic_name) - _validate_not_none('message', message) - request = HTTPRequest() - request.method = 'POST' - request.host = self._get_host() - request.path = '/' + _str(topic_name) + '/messages' - request.headers = message.add_headers(request) - request.body = _get_request_body_bytes_only( - 'message.body', message.body) - request.path, request.query = _update_request_uri_query(request) - request.headers = self._update_service_bus_header(request) - self._perform_request(request) - - def peek_lock_subscription_message(self, topic_name, subscription_name, - timeout='60'): - ''' - This operation is used to atomically retrieve and lock a message for - processing. The message is guaranteed not to be delivered to other - receivers during the lock duration period specified in buffer - description. 
Once the lock expires, the message will be available to - other receivers (on the same subscription only) during the lock - duration period specified in the topic description. Once the lock - expires, the message will be available to other receivers. In order to - complete processing of the message, the receiver should issue a delete - command with the lock ID received from this operation. To abandon - processing of the message and unlock it for other receivers, an Unlock - Message command should be issued, or the lock duration period can - expire. - - topic_name: Name of the topic. - subscription_name: Name of the subscription. - timeout: Optional. The timeout parameter is expressed in seconds. - ''' - _validate_not_none('topic_name', topic_name) - _validate_not_none('subscription_name', subscription_name) - request = HTTPRequest() - request.method = 'POST' - request.host = self._get_host() - request.path = '/' + \ - _str(topic_name) + '/subscriptions/' + \ - _str(subscription_name) + '/messages/head' - request.query = [('timeout', _int_or_none(timeout))] - request.path, request.query = _update_request_uri_query(request) - request.headers = self._update_service_bus_header(request) - response = self._perform_request(request) - - return _create_message(response, self) - - def unlock_subscription_message(self, topic_name, subscription_name, - sequence_number, lock_token): - ''' - Unlock a message for processing by other receivers on a given - subscription. This operation deletes the lock object, causing the - message to be unlocked. A message must have first been locked by a - receiver before this operation is called. - - topic_name: Name of the topic. - subscription_name: Name of the subscription. - sequence_number: - The sequence number of the message to be unlocked as returned in - BrokerProperties['SequenceNumber'] by the Peek Message operation. - lock_token: - The ID of the lock as returned by the Peek Message operation in - BrokerProperties['LockToken'] - ''' - _validate_not_none('topic_name', topic_name) - _validate_not_none('subscription_name', subscription_name) - _validate_not_none('sequence_number', sequence_number) - _validate_not_none('lock_token', lock_token) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + _str(topic_name) + \ - '/subscriptions/' + str(subscription_name) + \ - '/messages/' + _str(sequence_number) + \ - '/' + _str(lock_token) + '' - request.path, request.query = _update_request_uri_query(request) - request.headers = self._update_service_bus_header(request) - self._perform_request(request) - - def read_delete_subscription_message(self, topic_name, subscription_name, - timeout='60'): - ''' - Read and delete a message from a subscription as an atomic operation. - This operation should be used when a best-effort guarantee is - sufficient for an application; that is, using this operation it is - possible for messages to be lost if processing fails. - - topic_name: Name of the topic. - subscription_name: Name of the subscription. - timeout: Optional. The timeout parameter is expressed in seconds. 
- ''' - _validate_not_none('topic_name', topic_name) - _validate_not_none('subscription_name', subscription_name) - request = HTTPRequest() - request.method = 'DELETE' - request.host = self._get_host() - request.path = '/' + _str(topic_name) + \ - '/subscriptions/' + _str(subscription_name) + \ - '/messages/head' - request.query = [('timeout', _int_or_none(timeout))] - request.path, request.query = _update_request_uri_query(request) - request.headers = self._update_service_bus_header(request) - response = self._perform_request(request) - - return _create_message(response, self) - - def delete_subscription_message(self, topic_name, subscription_name, - sequence_number, lock_token): - ''' - Completes processing on a locked message and delete it from the - subscription. This operation should only be called after processing a - previously locked message is successful to maintain At-Least-Once - delivery assurances. - - topic_name: Name of the topic. - subscription_name: Name of the subscription. - sequence_number: - The sequence number of the message to be deleted as returned in - BrokerProperties['SequenceNumber'] by the Peek Message operation. - lock_token: - The ID of the lock as returned by the Peek Message operation in - BrokerProperties['LockToken'] - ''' - _validate_not_none('topic_name', topic_name) - _validate_not_none('subscription_name', subscription_name) - _validate_not_none('sequence_number', sequence_number) - _validate_not_none('lock_token', lock_token) - request = HTTPRequest() - request.method = 'DELETE' - request.host = self._get_host() - request.path = '/' + _str(topic_name) + \ - '/subscriptions/' + _str(subscription_name) + \ - '/messages/' + _str(sequence_number) + \ - '/' + _str(lock_token) + '' - request.path, request.query = _update_request_uri_query(request) - request.headers = self._update_service_bus_header(request) - self._perform_request(request) - - def send_queue_message(self, queue_name, message=None): - ''' - Sends a message into the specified queue. The limit to the number of - messages which may be present in the topic is governed by the message - size the MaxTopicSizeInMegaBytes. If this message will cause the queue - to exceed its quota, a quota exceeded error is returned and the - message will be rejected. - - queue_name: Name of the queue. - message: Message object containing message body and properties. - ''' - _validate_not_none('queue_name', queue_name) - _validate_not_none('message', message) - request = HTTPRequest() - request.method = 'POST' - request.host = self._get_host() - request.path = '/' + _str(queue_name) + '/messages' - request.headers = message.add_headers(request) - request.body = _get_request_body_bytes_only('message.body', - message.body) - request.path, request.query = _update_request_uri_query(request) - request.headers = self._update_service_bus_header(request) - self._perform_request(request) - - def peek_lock_queue_message(self, queue_name, timeout='60'): - ''' - Automically retrieves and locks a message from a queue for processing. - The message is guaranteed not to be delivered to other receivers (on - the same subscription only) during the lock duration period specified - in the queue description. Once the lock expires, the message will be - available to other receivers. In order to complete processing of the - message, the receiver should issue a delete command with the lock ID - received from this operation. 
To abandon processing of the message and - unlock it for other receivers, an Unlock Message command should be - issued, or the lock duration period can expire. - - queue_name: Name of the queue. - timeout: Optional. The timeout parameter is expressed in seconds. - ''' - _validate_not_none('queue_name', queue_name) - request = HTTPRequest() - request.method = 'POST' - request.host = self._get_host() - request.path = '/' + _str(queue_name) + '/messages/head' - request.query = [('timeout', _int_or_none(timeout))] - request.path, request.query = _update_request_uri_query(request) - request.headers = self._update_service_bus_header(request) - response = self._perform_request(request) - - return _create_message(response, self) - - def unlock_queue_message(self, queue_name, sequence_number, lock_token): - ''' - Unlocks a message for processing by other receivers on a given - subscription. This operation deletes the lock object, causing the - message to be unlocked. A message must have first been locked by a - receiver before this operation is called. - - queue_name: Name of the queue. - sequence_number: - The sequence number of the message to be unlocked as returned in - BrokerProperties['SequenceNumber'] by the Peek Message operation. - lock_token: - The ID of the lock as returned by the Peek Message operation in - BrokerProperties['LockToken'] - ''' - _validate_not_none('queue_name', queue_name) - _validate_not_none('sequence_number', sequence_number) - _validate_not_none('lock_token', lock_token) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + _str(queue_name) + \ - '/messages/' + _str(sequence_number) + \ - '/' + _str(lock_token) + '' - request.path, request.query = _update_request_uri_query(request) - request.headers = self._update_service_bus_header(request) - self._perform_request(request) - - def read_delete_queue_message(self, queue_name, timeout='60'): - ''' - Reads and deletes a message from a queue as an atomic operation. This - operation should be used when a best-effort guarantee is sufficient - for an application; that is, using this operation it is possible for - messages to be lost if processing fails. - - queue_name: Name of the queue. - timeout: Optional. The timeout parameter is expressed in seconds. - ''' - _validate_not_none('queue_name', queue_name) - request = HTTPRequest() - request.method = 'DELETE' - request.host = self._get_host() - request.path = '/' + _str(queue_name) + '/messages/head' - request.query = [('timeout', _int_or_none(timeout))] - request.path, request.query = _update_request_uri_query(request) - request.headers = self._update_service_bus_header(request) - response = self._perform_request(request) - - return _create_message(response, self) - - def delete_queue_message(self, queue_name, sequence_number, lock_token): - ''' - Completes processing on a locked message and delete it from the queue. - This operation should only be called after processing a previously - locked message is successful to maintain At-Least-Once delivery - assurances. - - queue_name: Name of the queue. - sequence_number: - The sequence number of the message to be deleted as returned in - BrokerProperties['SequenceNumber'] by the Peek Message operation. 
- lock_token: - The ID of the lock as returned by the Peek Message operation in - BrokerProperties['LockToken'] - ''' - _validate_not_none('queue_name', queue_name) - _validate_not_none('sequence_number', sequence_number) - _validate_not_none('lock_token', lock_token) - request = HTTPRequest() - request.method = 'DELETE' - request.host = self._get_host() - request.path = '/' + _str(queue_name) + \ - '/messages/' + _str(sequence_number) + \ - '/' + _str(lock_token) + '' - request.path, request.query = _update_request_uri_query(request) - request.headers = self._update_service_bus_header(request) - self._perform_request(request) - - def receive_queue_message(self, queue_name, peek_lock=True, timeout=60): - ''' - Receive a message from a queue for processing. - - queue_name: Name of the queue. - peek_lock: - Optional. True to retrieve and lock the message. False to read and - delete the message. Default is True (lock). - timeout: Optional. The timeout parameter is expressed in seconds. - ''' - if peek_lock: - return self.peek_lock_queue_message(queue_name, timeout) - else: - return self.read_delete_queue_message(queue_name, timeout) - - def receive_subscription_message(self, topic_name, subscription_name, - peek_lock=True, timeout=60): - ''' - Receive a message from a subscription for processing. - - topic_name: Name of the topic. - subscription_name: Name of the subscription. - peek_lock: - Optional. True to retrieve and lock the message. False to read and - delete the message. Default is True (lock). - timeout: Optional. The timeout parameter is expressed in seconds. - ''' - if peek_lock: - return self.peek_lock_subscription_message(topic_name, - subscription_name, - timeout) - else: - return self.read_delete_subscription_message(topic_name, - subscription_name, - timeout) - - def _get_host(self): - return self.service_namespace + self.host_base - - def _perform_request(self, request): - try: - resp = self._filter(request) - except HTTPError as ex: - return _service_bus_error_handler(ex) - - return resp - - def _update_service_bus_header(self, request): - ''' Add additional headers for service bus. ''' - - if request.method in ['PUT', 'POST', 'MERGE', 'DELETE']: - request.headers.append(('Content-Length', str(len(request.body)))) - - # if it is not GET or HEAD request, must set content-type. - if not request.method in ['GET', 'HEAD']: - for name, _ in request.headers: - if 'content-type' == name.lower(): - break - else: - request.headers.append( - ('Content-Type', - 'application/atom+xml;type=entry;charset=utf-8')) - - # Adds authoriaztion header for authentication. - request.headers.append( - ('Authorization', self._sign_service_bus_request(request))) - - return request.headers - - def _sign_service_bus_request(self, request): - ''' return the signed string with token. ''' - - return 'WRAP access_token="' + \ - self._get_token(request.host, request.path) + '"' - - def _token_is_expired(self, token): - ''' Check if token expires or not. ''' - time_pos_begin = token.find('ExpiresOn=') + len('ExpiresOn=') - time_pos_end = token.find('&', time_pos_begin) - token_expire_time = int(token[time_pos_begin:time_pos_end]) - time_now = time.mktime(time.localtime()) - - # Adding 30 seconds so the token wouldn't be expired when we send the - # token to server. - return (token_expire_time - time_now) < 30 - - def _get_token(self, host, path): - ''' - Returns token for the request. - - host: the service bus service request. - path: the service bus service request. 
- ''' - wrap_scope = 'http://' + host + path + self.issuer + self.account_key - - # Check whether has unexpired cache, return cached token if it is still - # usable. - if wrap_scope in _tokens: - token = _tokens[wrap_scope] - if not self._token_is_expired(token): - return token - - # get token from accessconstrol server - request = HTTPRequest() - request.protocol_override = 'https' - request.host = host.replace('.servicebus.', '-sb.accesscontrol.') - request.method = 'POST' - request.path = '/WRAPv0.9' - request.body = ('wrap_name=' + url_quote(self.issuer) + - '&wrap_password=' + url_quote(self.account_key) + - '&wrap_scope=' + - url_quote('http://' + host + path)).encode('utf-8') - request.headers.append(('Content-Length', str(len(request.body)))) - resp = self._httpclient.perform_request(request) - - token = resp.body.decode('utf-8') - token = url_unquote(token[token.find('=') + 1:token.rfind('&')]) - _tokens[wrap_scope] = token - - return token +#------------------------------------------------------------------------- +# Copyright (c) Microsoft. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#-------------------------------------------------------------------------- +import datetime +import os +import time + +from azure import ( + WindowsAzureError, + SERVICE_BUS_HOST_BASE, + _convert_response_to_feeds, + _dont_fail_not_exist, + _dont_fail_on_exist, + _encode_base64, + _get_request_body, + _get_request_body_bytes_only, + _int_or_none, + _sign_string, + _str, + _unicode_type, + _update_request_uri_query, + url_quote, + url_unquote, + _validate_not_none, + ) +from azure.http import ( + HTTPError, + HTTPRequest, + ) +from azure.http.httpclient import _HTTPClient +from azure.servicebus import ( + AZURE_SERVICEBUS_NAMESPACE, + AZURE_SERVICEBUS_ACCESS_KEY, + AZURE_SERVICEBUS_ISSUER, + _convert_topic_to_xml, + _convert_response_to_topic, + _convert_queue_to_xml, + _convert_response_to_queue, + _convert_subscription_to_xml, + _convert_response_to_subscription, + _convert_rule_to_xml, + _convert_response_to_rule, + _convert_xml_to_queue, + _convert_xml_to_topic, + _convert_xml_to_subscription, + _convert_xml_to_rule, + _create_message, + _service_bus_error_handler, + ) + + +class ServiceBusService(object): + + def __init__(self, service_namespace=None, account_key=None, issuer=None, + x_ms_version='2011-06-01', host_base=SERVICE_BUS_HOST_BASE, + shared_access_key_name=None, shared_access_key_value=None, + authentication=None): + ''' + Initializes the service bus service for a namespace with the specified + authentication settings (SAS or ACS). + + service_namespace: + Service bus namespace, required for all operations. If None, + the value is set to the AZURE_SERVICEBUS_NAMESPACE env variable. + account_key: + ACS authentication account key. If None, the value is set to the + AZURE_SERVICEBUS_ACCESS_KEY env variable. + Note that if both SAS and ACS settings are specified, SAS is used. + issuer: + ACS authentication issuer. 
If None, the value is set to the + AZURE_SERVICEBUS_ISSUER env variable. + Note that if both SAS and ACS settings are specified, SAS is used. + x_ms_version: Unused. Kept for backwards compatibility. + host_base: + Optional. Live host base url. Defaults to Azure url. Override this + for on-premise. + shared_access_key_name: + SAS authentication key name. + Note that if both SAS and ACS settings are specified, SAS is used. + shared_access_key_value: + SAS authentication key value. + Note that if both SAS and ACS settings are specified, SAS is used. + authentication: + Instance of authentication class. If this is specified, then + ACS and SAS parameters are ignored. + ''' + self.requestid = None + self.service_namespace = service_namespace + self.host_base = host_base + + if not self.service_namespace: + self.service_namespace = os.environ.get(AZURE_SERVICEBUS_NAMESPACE) + + if not self.service_namespace: + raise WindowsAzureError('You need to provide servicebus namespace') + + if authentication: + self.authentication = authentication + else: + if not account_key: + account_key = os.environ.get(AZURE_SERVICEBUS_ACCESS_KEY) + if not issuer: + issuer = os.environ.get(AZURE_SERVICEBUS_ISSUER) + + if shared_access_key_name and shared_access_key_value: + self.authentication = ServiceBusSASAuthentication( + shared_access_key_name, + shared_access_key_value) + elif account_key and issuer: + self.authentication = ServiceBusWrapTokenAuthentication( + account_key, + issuer) + else: + raise WindowsAzureError( + 'You need to provide servicebus access key and Issuer OR shared access key and value') + + self._httpclient = _HTTPClient(service_instance=self) + self._filter = self._httpclient.perform_request + + # Backwards compatibility: + # account_key and issuer used to be stored on the service class, they are + # now stored on the authentication class. + @property + def account_key(self): + return self.authentication.account_key + + @account_key.setter + def account_key(self, value): + self.authentication.account_key = value + + @property + def issuer(self): + return self.authentication.issuer + + @issuer.setter + def issuer(self, value): + self.authentication.issuer = value + + def with_filter(self, filter): + ''' + Returns a new service which will process requests with the specified + filter. Filtering operations can include logging, automatic retrying, + etc... The filter is a lambda which receives the HTTPRequest and + another lambda. The filter can perform any pre-processing on the + request, pass it off to the next lambda, and then perform any + post-processing on the response. + ''' + res = ServiceBusService( + service_namespace=self.service_namespace, + authentication=self.authentication) + + old_filter = self._filter + + def new_filter(request): + return filter(request, old_filter) + + res._filter = new_filter + return res + + def set_proxy(self, host, port, user=None, password=None): + ''' + Sets the proxy server host and port for the HTTP CONNECT Tunnelling. + + host: Address of the proxy. Ex: '192.168.0.100' + port: Port of the proxy. Ex: 6000 + user: User for proxy authorization. + password: Password for proxy authorization. + ''' + self._httpclient.set_proxy(host, port, user, password) + + def create_queue(self, queue_name, queue=None, fail_on_exist=False): + ''' + Creates a new queue. Once created, this queue's resource manifest is + immutable. + + queue_name: Name of the queue to create. + queue: Queue object to create. 
+ fail_on_exist: + Specify whether to throw an exception when the queue exists. + ''' + _validate_not_none('queue_name', queue_name) + request = HTTPRequest() + request.method = 'PUT' + request.host = self._get_host() + request.path = '/' + _str(queue_name) + '' + request.body = _get_request_body(_convert_queue_to_xml(queue)) + request.path, request.query = _update_request_uri_query(request) + request.headers = self._update_service_bus_header(request) + if not fail_on_exist: + try: + self._perform_request(request) + return True + except WindowsAzureError as ex: + _dont_fail_on_exist(ex) + return False + else: + self._perform_request(request) + return True + + def delete_queue(self, queue_name, fail_not_exist=False): + ''' + Deletes an existing queue. This operation will also remove all + associated state including messages in the queue. + + queue_name: Name of the queue to delete. + fail_not_exist: + Specify whether to throw an exception if the queue doesn't exist. + ''' + _validate_not_none('queue_name', queue_name) + request = HTTPRequest() + request.method = 'DELETE' + request.host = self._get_host() + request.path = '/' + _str(queue_name) + '' + request.path, request.query = _update_request_uri_query(request) + request.headers = self._update_service_bus_header(request) + if not fail_not_exist: + try: + self._perform_request(request) + return True + except WindowsAzureError as ex: + _dont_fail_not_exist(ex) + return False + else: + self._perform_request(request) + return True + + def get_queue(self, queue_name): + ''' + Retrieves an existing queue. + + queue_name: Name of the queue. + ''' + _validate_not_none('queue_name', queue_name) + request = HTTPRequest() + request.method = 'GET' + request.host = self._get_host() + request.path = '/' + _str(queue_name) + '' + request.path, request.query = _update_request_uri_query(request) + request.headers = self._update_service_bus_header(request) + response = self._perform_request(request) + + return _convert_response_to_queue(response) + + def list_queues(self): + ''' + Enumerates the queues in the service namespace. + ''' + request = HTTPRequest() + request.method = 'GET' + request.host = self._get_host() + request.path = '/$Resources/Queues' + request.path, request.query = _update_request_uri_query(request) + request.headers = self._update_service_bus_header(request) + response = self._perform_request(request) + + return _convert_response_to_feeds(response, _convert_xml_to_queue) + + def create_topic(self, topic_name, topic=None, fail_on_exist=False): + ''' + Creates a new topic. Once created, this topic resource manifest is + immutable. + + topic_name: Name of the topic to create. + topic: Topic object to create. + fail_on_exist: + Specify whether to throw an exception when the topic exists. + ''' + _validate_not_none('topic_name', topic_name) + request = HTTPRequest() + request.method = 'PUT' + request.host = self._get_host() + request.path = '/' + _str(topic_name) + '' + request.body = _get_request_body(_convert_topic_to_xml(topic)) + request.path, request.query = _update_request_uri_query(request) + request.headers = self._update_service_bus_header(request) + if not fail_on_exist: + try: + self._perform_request(request) + return True + except WindowsAzureError as ex: + _dont_fail_on_exist(ex) + return False + else: + self._perform_request(request) + return True + + def delete_topic(self, topic_name, fail_not_exist=False): + ''' + Deletes an existing topic. 
This operation will also remove all
+        associated state including associated subscriptions.
+
+        topic_name: Name of the topic to delete.
+        fail_not_exist:
+            Specify whether to throw an exception when the topic doesn't
+            exist.
+        '''
+        _validate_not_none('topic_name', topic_name)
+        request = HTTPRequest()
+        request.method = 'DELETE'
+        request.host = self._get_host()
+        request.path = '/' + _str(topic_name) + ''
+        request.path, request.query = _update_request_uri_query(request)
+        request.headers = self._update_service_bus_header(request)
+        if not fail_not_exist:
+            try:
+                self._perform_request(request)
+                return True
+            except WindowsAzureError as ex:
+                _dont_fail_not_exist(ex)
+                return False
+        else:
+            self._perform_request(request)
+            return True
+
+    def get_topic(self, topic_name):
+        '''
+        Retrieves the description for the specified topic.
+
+        topic_name: Name of the topic.
+        '''
+        _validate_not_none('topic_name', topic_name)
+        request = HTTPRequest()
+        request.method = 'GET'
+        request.host = self._get_host()
+        request.path = '/' + _str(topic_name) + ''
+        request.path, request.query = _update_request_uri_query(request)
+        request.headers = self._update_service_bus_header(request)
+        response = self._perform_request(request)
+
+        return _convert_response_to_topic(response)
+
+    def list_topics(self):
+        '''
+        Retrieves the topics in the service namespace.
+        '''
+        request = HTTPRequest()
+        request.method = 'GET'
+        request.host = self._get_host()
+        request.path = '/$Resources/Topics'
+        request.path, request.query = _update_request_uri_query(request)
+        request.headers = self._update_service_bus_header(request)
+        response = self._perform_request(request)
+
+        return _convert_response_to_feeds(response, _convert_xml_to_topic)
+
+    def create_rule(self, topic_name, subscription_name, rule_name, rule=None,
+                    fail_on_exist=False):
+        '''
+        Creates a new rule. Once created, this rule's resource manifest is
+        immutable.
+
+        topic_name: Name of the topic.
+        subscription_name: Name of the subscription.
+        rule_name: Name of the rule.
+        fail_on_exist:
+            Specify whether to throw an exception when the rule exists.
+        '''
+        _validate_not_none('topic_name', topic_name)
+        _validate_not_none('subscription_name', subscription_name)
+        _validate_not_none('rule_name', rule_name)
+        request = HTTPRequest()
+        request.method = 'PUT'
+        request.host = self._get_host()
+        request.path = '/' + _str(topic_name) + '/subscriptions/' + \
+                       _str(subscription_name) + \
+                       '/rules/' + _str(rule_name) + ''
+        request.body = _get_request_body(_convert_rule_to_xml(rule))
+        request.path, request.query = _update_request_uri_query(request)
+        request.headers = self._update_service_bus_header(request)
+        if not fail_on_exist:
+            try:
+                self._perform_request(request)
+                return True
+            except WindowsAzureError as ex:
+                _dont_fail_on_exist(ex)
+                return False
+        else:
+            self._perform_request(request)
+            return True
+
+    def delete_rule(self, topic_name, subscription_name, rule_name,
+                    fail_not_exist=False):
+        '''
+        Deletes an existing rule.
+
+        topic_name: Name of the topic.
+        subscription_name: Name of the subscription.
+        rule_name:
+            Name of the rule to delete. DEFAULT_RULE_NAME=$Default.
+            Use DEFAULT_RULE_NAME to delete the default rule for the
+            subscription.
+        fail_not_exist:
+            Specify whether to throw an exception when the rule doesn't
+            exist.
+        '''
+        _validate_not_none('topic_name', topic_name)
+        _validate_not_none('subscription_name', subscription_name)
+        _validate_not_none('rule_name', rule_name)
+        request = HTTPRequest()
+        request.method = 'DELETE'
+        request.host = self._get_host()
+        request.path = '/' + _str(topic_name) + '/subscriptions/' + \
+                       _str(subscription_name) + \
+                       '/rules/' + _str(rule_name) + ''
+        request.path, request.query = _update_request_uri_query(request)
+        request.headers = self._update_service_bus_header(request)
+        if not fail_not_exist:
+            try:
+                self._perform_request(request)
+                return True
+            except WindowsAzureError as ex:
+                _dont_fail_not_exist(ex)
+                return False
+        else:
+            self._perform_request(request)
+            return True
+
+    def get_rule(self, topic_name, subscription_name, rule_name):
+        '''
+        Retrieves the description for the specified rule.
+
+        topic_name: Name of the topic.
+        subscription_name: Name of the subscription.
+        rule_name: Name of the rule.
+        '''
+        _validate_not_none('topic_name', topic_name)
+        _validate_not_none('subscription_name', subscription_name)
+        _validate_not_none('rule_name', rule_name)
+        request = HTTPRequest()
+        request.method = 'GET'
+        request.host = self._get_host()
+        request.path = '/' + _str(topic_name) + '/subscriptions/' + \
+                       _str(subscription_name) + \
+                       '/rules/' + _str(rule_name) + ''
+        request.path, request.query = _update_request_uri_query(request)
+        request.headers = self._update_service_bus_header(request)
+        response = self._perform_request(request)
+
+        return _convert_response_to_rule(response)
+
+    def list_rules(self, topic_name, subscription_name):
+        '''
+        Retrieves the rules that exist under the specified subscription.
+
+        topic_name: Name of the topic.
+        subscription_name: Name of the subscription.
+        '''
+        _validate_not_none('topic_name', topic_name)
+        _validate_not_none('subscription_name', subscription_name)
+        request = HTTPRequest()
+        request.method = 'GET'
+        request.host = self._get_host()
+        request.path = '/' + \
+                       _str(topic_name) + '/subscriptions/' + \
+                       _str(subscription_name) + '/rules/'
+        request.path, request.query = _update_request_uri_query(request)
+        request.headers = self._update_service_bus_header(request)
+        response = self._perform_request(request)
+
+        return _convert_response_to_feeds(response, _convert_xml_to_rule)
+
+    def create_subscription(self, topic_name, subscription_name,
+                            subscription=None, fail_on_exist=False):
+        '''
+        Creates a new subscription. Once created, this subscription's
+        resource manifest is immutable.
+
+        topic_name: Name of the topic.
+        subscription_name: Name of the subscription.
+        fail_on_exist:
+            Specify whether to throw an exception when the subscription
+            exists.
+        '''
+        _validate_not_none('topic_name', topic_name)
+        _validate_not_none('subscription_name', subscription_name)
+        request = HTTPRequest()
+        request.method = 'PUT'
+        request.host = self._get_host()
+        request.path = '/' + \
+            _str(topic_name) + '/subscriptions/' + _str(subscription_name) + ''
+        request.body = _get_request_body(
+            _convert_subscription_to_xml(subscription))
+        request.path, request.query = _update_request_uri_query(request)
+        request.headers = self._update_service_bus_header(request)
+        if not fail_on_exist:
+            try:
+                self._perform_request(request)
+                return True
+            except WindowsAzureError as ex:
+                _dont_fail_on_exist(ex)
+                return False
+        else:
+            self._perform_request(request)
+            return True
+
+    def delete_subscription(self, topic_name, subscription_name,
+                            fail_not_exist=False):
+        '''
+        Deletes an existing subscription.
+
+        topic_name: Name of the topic.
+ subscription_name: Name of the subscription to delete. + fail_not_exist: + Specify whether to throw an exception when the subscription + doesn't exist. + ''' + _validate_not_none('topic_name', topic_name) + _validate_not_none('subscription_name', subscription_name) + request = HTTPRequest() + request.method = 'DELETE' + request.host = self._get_host() + request.path = '/' + \ + _str(topic_name) + '/subscriptions/' + _str(subscription_name) + '' + request.path, request.query = _update_request_uri_query(request) + request.headers = self._update_service_bus_header(request) + if not fail_not_exist: + try: + self._perform_request(request) + return True + except WindowsAzureError as ex: + _dont_fail_not_exist(ex) + return False + else: + self._perform_request(request) + return True + + def get_subscription(self, topic_name, subscription_name): + ''' + Gets an existing subscription. + + topic_name: Name of the topic. + subscription_name: Name of the subscription. + ''' + _validate_not_none('topic_name', topic_name) + _validate_not_none('subscription_name', subscription_name) + request = HTTPRequest() + request.method = 'GET' + request.host = self._get_host() + request.path = '/' + \ + _str(topic_name) + '/subscriptions/' + _str(subscription_name) + '' + request.path, request.query = _update_request_uri_query(request) + request.headers = self._update_service_bus_header(request) + response = self._perform_request(request) + + return _convert_response_to_subscription(response) + + def list_subscriptions(self, topic_name): + ''' + Retrieves the subscriptions in the specified topic. + + topic_name: Name of the topic. + ''' + _validate_not_none('topic_name', topic_name) + request = HTTPRequest() + request.method = 'GET' + request.host = self._get_host() + request.path = '/' + _str(topic_name) + '/subscriptions/' + request.path, request.query = _update_request_uri_query(request) + request.headers = self._update_service_bus_header(request) + response = self._perform_request(request) + + return _convert_response_to_feeds(response, + _convert_xml_to_subscription) + + def send_topic_message(self, topic_name, message=None): + ''' + Enqueues a message into the specified topic. The limit to the number + of messages which may be present in the topic is governed by the + message size in MaxTopicSizeInBytes. If this message causes the topic + to exceed its quota, a quota exceeded error is returned and the + message will be rejected. + + topic_name: Name of the topic. + message: Message object containing message body and properties. + ''' + _validate_not_none('topic_name', topic_name) + _validate_not_none('message', message) + request = HTTPRequest() + request.method = 'POST' + request.host = self._get_host() + request.path = '/' + _str(topic_name) + '/messages' + request.headers = message.add_headers(request) + request.body = _get_request_body_bytes_only( + 'message.body', message.body) + request.path, request.query = _update_request_uri_query(request) + request.headers = self._update_service_bus_header(request) + self._perform_request(request) + + def peek_lock_subscription_message(self, topic_name, subscription_name, + timeout='60'): + ''' + This operation is used to atomically retrieve and lock a message for + processing. The message is guaranteed not to be delivered to other + receivers during the lock duration period specified in buffer + description. 
Once the lock expires, the message
+        becomes available to other receivers on the same subscription. In
+        order to complete processing of the message, the receiver should
+        issue a delete command with the lock ID received from this
+        operation. To abandon processing of the message and unlock it for
+        other receivers, an Unlock Message command should be issued, or the
+        lock duration period can expire.
+
+        topic_name: Name of the topic.
+        subscription_name: Name of the subscription.
+        timeout: Optional. The timeout parameter is expressed in seconds.
+        '''
+        _validate_not_none('topic_name', topic_name)
+        _validate_not_none('subscription_name', subscription_name)
+        request = HTTPRequest()
+        request.method = 'POST'
+        request.host = self._get_host()
+        request.path = '/' + \
+            _str(topic_name) + '/subscriptions/' + \
+            _str(subscription_name) + '/messages/head'
+        request.query = [('timeout', _int_or_none(timeout))]
+        request.path, request.query = _update_request_uri_query(request)
+        request.headers = self._update_service_bus_header(request)
+        response = self._perform_request(request)
+
+        return _create_message(response, self)
+
+    def unlock_subscription_message(self, topic_name, subscription_name,
+                                    sequence_number, lock_token):
+        '''
+        Unlock a message for processing by other receivers on a given
+        subscription. This operation deletes the lock object, causing the
+        message to be unlocked. A message must have first been locked by a
+        receiver before this operation is called.
+
+        topic_name: Name of the topic.
+        subscription_name: Name of the subscription.
+        sequence_number:
+            The sequence number of the message to be unlocked as returned in
+            BrokerProperties['SequenceNumber'] by the Peek Message operation.
+        lock_token:
+            The ID of the lock as returned by the Peek Message operation in
+            BrokerProperties['LockToken']
+        '''
+        _validate_not_none('topic_name', topic_name)
+        _validate_not_none('subscription_name', subscription_name)
+        _validate_not_none('sequence_number', sequence_number)
+        _validate_not_none('lock_token', lock_token)
+        request = HTTPRequest()
+        request.method = 'PUT'
+        request.host = self._get_host()
+        request.path = '/' + _str(topic_name) + \
+                       '/subscriptions/' + _str(subscription_name) + \
+                       '/messages/' + _str(sequence_number) + \
+                       '/' + _str(lock_token) + ''
+        request.path, request.query = _update_request_uri_query(request)
+        request.headers = self._update_service_bus_header(request)
+        self._perform_request(request)
+
+    def read_delete_subscription_message(self, topic_name, subscription_name,
+                                         timeout='60'):
+        '''
+        Read and delete a message from a subscription as an atomic operation.
+        This operation should be used when a best-effort guarantee is
+        sufficient for an application; that is, using this operation it is
+        possible for messages to be lost if processing fails.
+
+        topic_name: Name of the topic.
+        subscription_name: Name of the subscription.
+        timeout: Optional. The timeout parameter is expressed in seconds.
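As a quick illustration of the peek-lock contract described in these docstrings: a minimal sketch, assuming a ServiceBusService instance named sbs, an existing topic 'mytopic' with subscription 'mysub', and a hypothetical process() handler; it also assumes the returned Message exposes the broker_properties dict that _create_message populates, with the BrokerProperties keys named above.

msg = sbs.peek_lock_subscription_message('mytopic', 'mysub', timeout='60')
if msg.body is not None:
    seq = msg.broker_properties['SequenceNumber']
    lock = msg.broker_properties['LockToken']
    try:
        process(msg.body)  # hypothetical application handler
        # Completing deletes the message, preserving at-least-once delivery.
        sbs.delete_subscription_message('mytopic', 'mysub', seq, lock)
    except Exception:
        # Abandoning unlocks the message for other receivers.
        sbs.unlock_subscription_message('mytopic', 'mysub', seq, lock)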
+        '''
+        _validate_not_none('topic_name', topic_name)
+        _validate_not_none('subscription_name', subscription_name)
+        request = HTTPRequest()
+        request.method = 'DELETE'
+        request.host = self._get_host()
+        request.path = '/' + _str(topic_name) + \
+                       '/subscriptions/' + _str(subscription_name) + \
+                       '/messages/head'
+        request.query = [('timeout', _int_or_none(timeout))]
+        request.path, request.query = _update_request_uri_query(request)
+        request.headers = self._update_service_bus_header(request)
+        response = self._perform_request(request)
+
+        return _create_message(response, self)
+
+    def delete_subscription_message(self, topic_name, subscription_name,
+                                    sequence_number, lock_token):
+        '''
+        Completes processing on a locked message and deletes it from the
+        subscription. This operation should only be called after processing
+        of a previously locked message has succeeded, in order to maintain
+        At-Least-Once delivery assurances.
+
+        topic_name: Name of the topic.
+        subscription_name: Name of the subscription.
+        sequence_number:
+            The sequence number of the message to be deleted as returned in
+            BrokerProperties['SequenceNumber'] by the Peek Message operation.
+        lock_token:
+            The ID of the lock as returned by the Peek Message operation in
+            BrokerProperties['LockToken']
+        '''
+        _validate_not_none('topic_name', topic_name)
+        _validate_not_none('subscription_name', subscription_name)
+        _validate_not_none('sequence_number', sequence_number)
+        _validate_not_none('lock_token', lock_token)
+        request = HTTPRequest()
+        request.method = 'DELETE'
+        request.host = self._get_host()
+        request.path = '/' + _str(topic_name) + \
+                       '/subscriptions/' + _str(subscription_name) + \
+                       '/messages/' + _str(sequence_number) + \
+                       '/' + _str(lock_token) + ''
+        request.path, request.query = _update_request_uri_query(request)
+        request.headers = self._update_service_bus_header(request)
+        self._perform_request(request)
+
+    def send_queue_message(self, queue_name, message=None):
+        '''
+        Sends a message into the specified queue. The limit on the number of
+        messages which may be present in the queue is governed by the queue
+        size specified at creation (MaxSizeInMegabytes in the queue
+        description). If this message causes the queue to exceed its quota,
+        a quota exceeded error is returned and the message will be rejected.
+
+        queue_name: Name of the queue.
+        message: Message object containing message body and properties.
+        '''
+        _validate_not_none('queue_name', queue_name)
+        _validate_not_none('message', message)
+        request = HTTPRequest()
+        request.method = 'POST'
+        request.host = self._get_host()
+        request.path = '/' + _str(queue_name) + '/messages'
+        request.headers = message.add_headers(request)
+        request.body = _get_request_body_bytes_only('message.body',
+                                                    message.body)
+        request.path, request.query = _update_request_uri_query(request)
+        request.headers = self._update_service_bus_header(request)
+        self._perform_request(request)
+
+    def peek_lock_queue_message(self, queue_name, timeout='60'):
+        '''
+        Atomically retrieves and locks a message from a queue for processing.
+        The message is guaranteed not to be delivered to other receivers
+        during the lock duration period specified in the queue description.
+        Once the lock expires, the message will be available to other
+        receivers. In order to complete processing of the message, the
+        receiver should issue a delete command with the lock ID
+        received from this operation.
To abandon processing of the message and
+        unlock it for other receivers, an Unlock Message command should be
+        issued, or the lock duration period can expire.
+
+        queue_name: Name of the queue.
+        timeout: Optional. The timeout parameter is expressed in seconds.
+        '''
+        _validate_not_none('queue_name', queue_name)
+        request = HTTPRequest()
+        request.method = 'POST'
+        request.host = self._get_host()
+        request.path = '/' + _str(queue_name) + '/messages/head'
+        request.query = [('timeout', _int_or_none(timeout))]
+        request.path, request.query = _update_request_uri_query(request)
+        request.headers = self._update_service_bus_header(request)
+        response = self._perform_request(request)
+
+        return _create_message(response, self)
+
+    def unlock_queue_message(self, queue_name, sequence_number, lock_token):
+        '''
+        Unlocks a message for processing by other receivers on a given
+        queue. This operation deletes the lock object, causing the
+        message to be unlocked. A message must have first been locked by a
+        receiver before this operation is called.
+
+        queue_name: Name of the queue.
+        sequence_number:
+            The sequence number of the message to be unlocked as returned in
+            BrokerProperties['SequenceNumber'] by the Peek Message operation.
+        lock_token:
+            The ID of the lock as returned by the Peek Message operation in
+            BrokerProperties['LockToken']
+        '''
+        _validate_not_none('queue_name', queue_name)
+        _validate_not_none('sequence_number', sequence_number)
+        _validate_not_none('lock_token', lock_token)
+        request = HTTPRequest()
+        request.method = 'PUT'
+        request.host = self._get_host()
+        request.path = '/' + _str(queue_name) + \
+                       '/messages/' + _str(sequence_number) + \
+                       '/' + _str(lock_token) + ''
+        request.path, request.query = _update_request_uri_query(request)
+        request.headers = self._update_service_bus_header(request)
+        self._perform_request(request)
+
+    def read_delete_queue_message(self, queue_name, timeout='60'):
+        '''
+        Reads and deletes a message from a queue as an atomic operation. This
+        operation should be used when a best-effort guarantee is sufficient
+        for an application; that is, using this operation it is possible for
+        messages to be lost if processing fails.
+
+        queue_name: Name of the queue.
+        timeout: Optional. The timeout parameter is expressed in seconds.
+        '''
+        _validate_not_none('queue_name', queue_name)
+        request = HTTPRequest()
+        request.method = 'DELETE'
+        request.host = self._get_host()
+        request.path = '/' + _str(queue_name) + '/messages/head'
+        request.query = [('timeout', _int_or_none(timeout))]
+        request.path, request.query = _update_request_uri_query(request)
+        request.headers = self._update_service_bus_header(request)
+        response = self._perform_request(request)
+
+        return _create_message(response, self)
+
+    def delete_queue_message(self, queue_name, sequence_number, lock_token):
+        '''
+        Completes processing on a locked message and deletes it from the
+        queue. This operation should only be called after processing of a
+        previously locked message has succeeded, in order to maintain
+        At-Least-Once delivery assurances.
+
+        queue_name: Name of the queue.
+        sequence_number:
+            The sequence number of the message to be deleted as returned in
+            BrokerProperties['SequenceNumber'] by the Peek Message operation.
+ lock_token: + The ID of the lock as returned by the Peek Message operation in + BrokerProperties['LockToken'] + ''' + _validate_not_none('queue_name', queue_name) + _validate_not_none('sequence_number', sequence_number) + _validate_not_none('lock_token', lock_token) + request = HTTPRequest() + request.method = 'DELETE' + request.host = self._get_host() + request.path = '/' + _str(queue_name) + \ + '/messages/' + _str(sequence_number) + \ + '/' + _str(lock_token) + '' + request.path, request.query = _update_request_uri_query(request) + request.headers = self._update_service_bus_header(request) + self._perform_request(request) + + def receive_queue_message(self, queue_name, peek_lock=True, timeout=60): + ''' + Receive a message from a queue for processing. + + queue_name: Name of the queue. + peek_lock: + Optional. True to retrieve and lock the message. False to read and + delete the message. Default is True (lock). + timeout: Optional. The timeout parameter is expressed in seconds. + ''' + if peek_lock: + return self.peek_lock_queue_message(queue_name, timeout) + else: + return self.read_delete_queue_message(queue_name, timeout) + + def receive_subscription_message(self, topic_name, subscription_name, + peek_lock=True, timeout=60): + ''' + Receive a message from a subscription for processing. + + topic_name: Name of the topic. + subscription_name: Name of the subscription. + peek_lock: + Optional. True to retrieve and lock the message. False to read and + delete the message. Default is True (lock). + timeout: Optional. The timeout parameter is expressed in seconds. + ''' + if peek_lock: + return self.peek_lock_subscription_message(topic_name, + subscription_name, + timeout) + else: + return self.read_delete_subscription_message(topic_name, + subscription_name, + timeout) + + def _get_host(self): + return self.service_namespace + self.host_base + + def _perform_request(self, request): + try: + resp = self._filter(request) + except HTTPError as ex: + return _service_bus_error_handler(ex) + + return resp + + def _update_service_bus_header(self, request): + ''' Add additional headers for service bus. ''' + + if request.method in ['PUT', 'POST', 'MERGE', 'DELETE']: + request.headers.append(('Content-Length', str(len(request.body)))) + + # if it is not GET or HEAD request, must set content-type. + if not request.method in ['GET', 'HEAD']: + for name, _ in request.headers: + if 'content-type' == name.lower(): + break + else: + request.headers.append( + ('Content-Type', + 'application/atom+xml;type=entry;charset=utf-8')) + + # Adds authorization header for authentication. + self.authentication.sign_request(request, self._httpclient) + + return request.headers + + +# Token cache for Authentication +# Shared by the different instances of ServiceBusWrapTokenAuthentication +_tokens = {} + + +class ServiceBusWrapTokenAuthentication: + def __init__(self, account_key, issuer): + self.account_key = account_key + self.issuer = issuer + + def sign_request(self, request, httpclient): + request.headers.append( + ('Authorization', self._get_authorization(request, httpclient))) + + def _get_authorization(self, request, httpclient): + ''' return the signed string with token. ''' + return 'WRAP access_token="' + \ + self._get_token(request.host, request.path, httpclient) + '"' + + def _token_is_expired(self, token): + ''' Check if token expires or not. 
'''
+        time_pos_begin = token.find('ExpiresOn=') + len('ExpiresOn=')
+        time_pos_end = token.find('&', time_pos_begin)
+        token_expire_time = int(token[time_pos_begin:time_pos_end])
+        time_now = time.mktime(time.localtime())
+
+        # Treat tokens that expire within the next 30 seconds as expired, so
+        # the token is still valid by the time it reaches the server.
+        return (token_expire_time - time_now) < 30
+
+    def _get_token(self, host, path, httpclient):
+        '''
+        Returns a token for the request.
+
+        host: Host of the service bus service request.
+        path: Path of the service bus service request.
+        '''
+        wrap_scope = 'http://' + host + path + self.issuer + self.account_key
+
+        # Return the cached token for this scope if it has not expired.
+        if wrap_scope in _tokens:
+            token = _tokens[wrap_scope]
+            if not self._token_is_expired(token):
+                return token
+
+        # Get a new token from the access control server.
+        request = HTTPRequest()
+        request.protocol_override = 'https'
+        request.host = host.replace('.servicebus.', '-sb.accesscontrol.')
+        request.method = 'POST'
+        request.path = '/WRAPv0.9'
+        request.body = ('wrap_name=' + url_quote(self.issuer) +
+                        '&wrap_password=' + url_quote(self.account_key) +
+                        '&wrap_scope=' +
+                        url_quote('http://' + host + path)).encode('utf-8')
+        request.headers.append(('Content-Length', str(len(request.body))))
+        resp = httpclient.perform_request(request)
+
+        token = resp.body.decode('utf-8')
+        token = url_unquote(token[token.find('=') + 1:token.rfind('&')])
+        _tokens[wrap_scope] = token
+
+        return token
+
+
+class ServiceBusSASAuthentication:
+    def __init__(self, key_name, key_value):
+        self.key_name = key_name
+        self.key_value = key_value
+
+    def sign_request(self, request, httpclient):
+        request.headers.append(
+            ('Authorization', self._get_authorization(request, httpclient)))
+
+    def _get_authorization(self, request, httpclient):
+        uri = httpclient.get_uri(request)
+        uri = url_quote(uri, '').lower()
+        expiry = str(self._get_expiry())
+
+        to_sign = uri + '\n' + expiry
+        signature = url_quote(_sign_string(self.key_value, to_sign, False), '')
+
+        auth_format = 'SharedAccessSignature sig={0}&se={1}&skn={2}&sr={3}'
+        auth = auth_format.format(signature, expiry, self.key_name, uri)
+
+        return auth
+
+    def _get_expiry(self):
+        '''Returns the UTC datetime, in seconds since Epoch, when this signed
+        request expires (5 minutes from now).'''
+        return int(round(time.time() + 300))
diff --git a/awx/lib/site-packages/azure/servicemanagement/__init__.py b/awx/lib/site-packages/azure/servicemanagement/__init__.py
index caca5db4ba..b452a828bf 100644
--- a/awx/lib/site-packages/azure/servicemanagement/__init__.py
+++ b/awx/lib/site-packages/azure/servicemanagement/__init__.py
@@ -1,1692 +1,3004 @@
-#-------------------------------------------------------------------------
-# Copyright (c) Microsoft. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
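For reference, the Authorization header assembled by ServiceBusSASAuthentication above can be reproduced standalone. A minimal sketch, on the assumption (consistent with the signing helpers elsewhere in this SDK) that _sign_string returns the base64-encoded HMAC-SHA256 of the string under the key; make_sas_header is an illustrative name, not part of the library.

import base64
import hashlib
import hmac
import time
try:
    from urllib.parse import quote as url_quote  # Python 3
except ImportError:
    from urllib2 import quote as url_quote  # Python 2, as in this module

def make_sas_header(uri, key_name, key_value):
    # Sign '<quoted, lower-cased uri>\n<expiry>' with HMAC-SHA256 of the key.
    quoted_uri = url_quote(uri, '').lower()
    expiry = str(int(round(time.time() + 300)))  # five minutes, as above
    to_sign = (quoted_uri + '\n' + expiry).encode('utf-8')
    sig = base64.b64encode(
        hmac.new(key_value.encode('utf-8'), to_sign, hashlib.sha256).digest())
    return 'SharedAccessSignature sig={0}&se={1}&skn={2}&sr={3}'.format(
        url_quote(sig, ''), expiry, key_name, quoted_uri)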
-#-------------------------------------------------------------------------- -from xml.dom import minidom -from azure import ( - WindowsAzureData, - _Base64String, - _create_entry, - _dict_of, - _encode_base64, - _general_error_handler, - _get_children_from_path, - _get_first_child_node_value, - _list_of, - _scalar_list_of, - _str, - ) - -#----------------------------------------------------------------------------- -# Constants for Azure app environment settings. -AZURE_MANAGEMENT_CERTFILE = 'AZURE_MANAGEMENT_CERTFILE' -AZURE_MANAGEMENT_SUBSCRIPTIONID = 'AZURE_MANAGEMENT_SUBSCRIPTIONID' - -# x-ms-version for service management. -X_MS_VERSION = '2013-06-01' - -#----------------------------------------------------------------------------- -# Data classes - - -class StorageServices(WindowsAzureData): - - def __init__(self): - self.storage_services = _list_of(StorageService) - - def __iter__(self): - return iter(self.storage_services) - - def __len__(self): - return len(self.storage_services) - - def __getitem__(self, index): - return self.storage_services[index] - - -class StorageService(WindowsAzureData): - - def __init__(self): - self.url = '' - self.service_name = '' - self.storage_service_properties = StorageAccountProperties() - self.storage_service_keys = StorageServiceKeys() - self.extended_properties = _dict_of( - 'ExtendedProperty', 'Name', 'Value') - self.capabilities = _scalar_list_of(str, 'Capability') - - -class StorageAccountProperties(WindowsAzureData): - - def __init__(self): - self.description = u'' - self.affinity_group = u'' - self.location = u'' - self.label = _Base64String() - self.status = u'' - self.endpoints = _scalar_list_of(str, 'Endpoint') - self.geo_replication_enabled = False - self.geo_primary_region = u'' - self.status_of_primary = u'' - self.geo_secondary_region = u'' - self.status_of_secondary = u'' - self.last_geo_failover_time = u'' - - -class StorageServiceKeys(WindowsAzureData): - - def __init__(self): - self.primary = u'' - self.secondary = u'' - - -class Locations(WindowsAzureData): - - def __init__(self): - self.locations = _list_of(Location) - - def __iter__(self): - return iter(self.locations) - - def __len__(self): - return len(self.locations) - - def __getitem__(self, index): - return self.locations[index] - - -class Location(WindowsAzureData): - - def __init__(self): - self.name = u'' - self.display_name = u'' - self.available_services = _scalar_list_of(str, 'AvailableService') - - -class AffinityGroup(WindowsAzureData): - - def __init__(self): - self.name = '' - self.label = _Base64String() - self.description = u'' - self.location = u'' - self.hosted_services = HostedServices() - self.storage_services = StorageServices() - self.capabilities = _scalar_list_of(str, 'Capability') - - -class AffinityGroups(WindowsAzureData): - - def __init__(self): - self.affinity_groups = _list_of(AffinityGroup) - - def __iter__(self): - return iter(self.affinity_groups) - - def __len__(self): - return len(self.affinity_groups) - - def __getitem__(self, index): - return self.affinity_groups[index] - - -class HostedServices(WindowsAzureData): - - def __init__(self): - self.hosted_services = _list_of(HostedService) - - def __iter__(self): - return iter(self.hosted_services) - - def __len__(self): - return len(self.hosted_services) - - def __getitem__(self, index): - return self.hosted_services[index] - - -class HostedService(WindowsAzureData): - - def __init__(self): - self.url = u'' - self.service_name = u'' - self.hosted_service_properties = 
HostedServiceProperties() - self.deployments = Deployments() - - -class HostedServiceProperties(WindowsAzureData): - - def __init__(self): - self.description = u'' - self.location = u'' - self.affinity_group = u'' - self.label = _Base64String() - self.status = u'' - self.date_created = u'' - self.date_last_modified = u'' - self.extended_properties = _dict_of( - 'ExtendedProperty', 'Name', 'Value') - - -class VirtualNetworkSites(WindowsAzureData): - - def __init__(self): - self.virtual_network_sites = _list_of(VirtualNetworkSite) - - def __iter__(self): - return iter(self.virtual_network_sites) - - def __len__(self): - return len(self.virtual_network_sites) - - def __getitem__(self, index): - return self.virtual_network_sites[index] - - -class VirtualNetworkSite(WindowsAzureData): - - def __init__(self): - self.name = u'' - self.id = u'' - self.affinity_group = u'' - self.subnets = Subnets() - - -class Subnets(WindowsAzureData): - - def __init__(self): - self.subnets = _list_of(Subnet) - - def __iter__(self): - return iter(self.subnets) - - def __len__(self): - return len(self.subnets) - - def __getitem__(self, index): - return self.subnets[index] - - -class Subnet(WindowsAzureData): - - def __init__(self): - self.name = u'' - self.address_prefix = u'' - - - -class Deployments(WindowsAzureData): - - def __init__(self): - self.deployments = _list_of(Deployment) - - def __iter__(self): - return iter(self.deployments) - - def __len__(self): - return len(self.deployments) - - def __getitem__(self, index): - return self.deployments[index] - - -class Deployment(WindowsAzureData): - - def __init__(self): - self.name = u'' - self.deployment_slot = u'' - self.private_id = u'' - self.status = u'' - self.label = _Base64String() - self.url = u'' - self.configuration = _Base64String() - self.role_instance_list = RoleInstanceList() - self.upgrade_status = UpgradeStatus() - self.upgrade_domain_count = u'' - self.role_list = RoleList() - self.sdk_version = u'' - self.input_endpoint_list = InputEndpoints() - self.locked = False - self.rollback_allowed = False - self.persistent_vm_downtime_info = PersistentVMDowntimeInfo() - self.created_time = u'' - self.virtual_network_name = u'' - self.last_modified_time = u'' - self.extended_properties = _dict_of( - 'ExtendedProperty', 'Name', 'Value') - - -class RoleInstanceList(WindowsAzureData): - - def __init__(self): - self.role_instances = _list_of(RoleInstance) - - def __iter__(self): - return iter(self.role_instances) - - def __len__(self): - return len(self.role_instances) - - def __getitem__(self, index): - return self.role_instances[index] - - -class RoleInstance(WindowsAzureData): - - def __init__(self): - self.role_name = u'' - self.instance_name = u'' - self.instance_status = u'' - self.instance_upgrade_domain = 0 - self.instance_fault_domain = 0 - self.instance_size = u'' - self.instance_state_details = u'' - self.instance_error_code = u'' - self.ip_address = u'' - self.instance_endpoints = InstanceEndpoints() - self.power_state = u'' - self.fqdn = u'' - self.host_name = u'' - - -class InstanceEndpoints(WindowsAzureData): - - def __init__(self): - self.instance_endpoints = _list_of(InstanceEndpoint) - - def __iter__(self): - return iter(self.instance_endpoints) - - def __len__(self): - return len(self.instance_endpoints) - - def __getitem__(self, index): - return self.instance_endpoints[index] - - -class InstanceEndpoint(WindowsAzureData): - - def __init__(self): - self.name = u'' - self.vip = u'' - self.public_port = u'' - self.local_port = u'' - 
self.protocol = u'' - - -class UpgradeStatus(WindowsAzureData): - - def __init__(self): - self.upgrade_type = u'' - self.current_upgrade_domain_state = u'' - self.current_upgrade_domain = u'' - - -class InputEndpoints(WindowsAzureData): - - def __init__(self): - self.input_endpoints = _list_of(InputEndpoint) - - def __iter__(self): - return iter(self.input_endpoints) - - def __len__(self): - return len(self.input_endpoints) - - def __getitem__(self, index): - return self.input_endpoints[index] - - -class InputEndpoint(WindowsAzureData): - - def __init__(self): - self.role_name = u'' - self.vip = u'' - self.port = u'' - - -class RoleList(WindowsAzureData): - - def __init__(self): - self.roles = _list_of(Role) - - def __iter__(self): - return iter(self.roles) - - def __len__(self): - return len(self.roles) - - def __getitem__(self, index): - return self.roles[index] - - -class Role(WindowsAzureData): - - def __init__(self): - self.role_name = u'' - self.role_type = u'' - self.os_version = u'' - self.configuration_sets = ConfigurationSets() - self.availability_set_name = u'' - self.data_virtual_hard_disks = DataVirtualHardDisks() - self.os_virtual_hard_disk = OSVirtualHardDisk() - self.role_size = u'' - self.default_win_rm_certificate_thumbprint = u'' - - -class PersistentVMDowntimeInfo(WindowsAzureData): - - def __init__(self): - self.start_time = u'' - self.end_time = u'' - self.status = u'' - - -class Certificates(WindowsAzureData): - - def __init__(self): - self.certificates = _list_of(Certificate) - - def __iter__(self): - return iter(self.certificates) - - def __len__(self): - return len(self.certificates) - - def __getitem__(self, index): - return self.certificates[index] - - -class Certificate(WindowsAzureData): - - def __init__(self): - self.certificate_url = u'' - self.thumbprint = u'' - self.thumbprint_algorithm = u'' - self.data = u'' - - -class OperationError(WindowsAzureData): - - def __init__(self): - self.code = u'' - self.message = u'' - - -class Operation(WindowsAzureData): - - def __init__(self): - self.id = u'' - self.status = u'' - self.http_status_code = u'' - self.error = OperationError() - - -class OperatingSystem(WindowsAzureData): - - def __init__(self): - self.version = u'' - self.label = _Base64String() - self.is_default = True - self.is_active = True - self.family = 0 - self.family_label = _Base64String() - - -class OperatingSystems(WindowsAzureData): - - def __init__(self): - self.operating_systems = _list_of(OperatingSystem) - - def __iter__(self): - return iter(self.operating_systems) - - def __len__(self): - return len(self.operating_systems) - - def __getitem__(self, index): - return self.operating_systems[index] - - -class OperatingSystemFamily(WindowsAzureData): - - def __init__(self): - self.name = u'' - self.label = _Base64String() - self.operating_systems = OperatingSystems() - - -class OperatingSystemFamilies(WindowsAzureData): - - def __init__(self): - self.operating_system_families = _list_of(OperatingSystemFamily) - - def __iter__(self): - return iter(self.operating_system_families) - - def __len__(self): - return len(self.operating_system_families) - - def __getitem__(self, index): - return self.operating_system_families[index] - - -class Subscription(WindowsAzureData): - - def __init__(self): - self.subscription_id = u'' - self.subscription_name = u'' - self.subscription_status = u'' - self.account_admin_live_email_id = u'' - self.service_admin_live_email_id = u'' - self.max_core_count = 0 - self.max_storage_accounts = 0 - self.max_hosted_services 
= 0 - self.current_core_count = 0 - self.current_hosted_services = 0 - self.current_storage_accounts = 0 - self.max_virtual_network_sites = 0 - self.max_local_network_sites = 0 - self.max_dns_servers = 0 - - -class AvailabilityResponse(WindowsAzureData): - - def __init__(self): - self.result = False - - -class SubscriptionCertificates(WindowsAzureData): - - def __init__(self): - self.subscription_certificates = _list_of(SubscriptionCertificate) - - def __iter__(self): - return iter(self.subscription_certificates) - - def __len__(self): - return len(self.subscription_certificates) - - def __getitem__(self, index): - return self.subscription_certificates[index] - - -class SubscriptionCertificate(WindowsAzureData): - - def __init__(self): - self.subscription_certificate_public_key = u'' - self.subscription_certificate_thumbprint = u'' - self.subscription_certificate_data = u'' - self.created = u'' - - -class Images(WindowsAzureData): - - def __init__(self): - self.images = _list_of(OSImage) - - def __iter__(self): - return iter(self.images) - - def __len__(self): - return len(self.images) - - def __getitem__(self, index): - return self.images[index] - - -class OSImage(WindowsAzureData): - - def __init__(self): - self.affinity_group = u'' - self.category = u'' - self.location = u'' - self.logical_size_in_gb = 0 - self.label = u'' - self.media_link = u'' - self.name = u'' - self.os = u'' - self.eula = u'' - self.description = u'' - - -class Disks(WindowsAzureData): - - def __init__(self): - self.disks = _list_of(Disk) - - def __iter__(self): - return iter(self.disks) - - def __len__(self): - return len(self.disks) - - def __getitem__(self, index): - return self.disks[index] - - -class Disk(WindowsAzureData): - - def __init__(self): - self.affinity_group = u'' - self.attached_to = AttachedTo() - self.has_operating_system = u'' - self.is_corrupted = u'' - self.location = u'' - self.logical_disk_size_in_gb = 0 - self.label = u'' - self.media_link = u'' - self.name = u'' - self.os = u'' - self.source_image_name = u'' - - -class AttachedTo(WindowsAzureData): - - def __init__(self): - self.hosted_service_name = u'' - self.deployment_name = u'' - self.role_name = u'' - - -class PersistentVMRole(WindowsAzureData): - - def __init__(self): - self.role_name = u'' - self.role_type = u'' - self.os_version = u'' # undocumented - self.configuration_sets = ConfigurationSets() - self.availability_set_name = u'' - self.data_virtual_hard_disks = DataVirtualHardDisks() - self.os_virtual_hard_disk = OSVirtualHardDisk() - self.role_size = u'' - self.default_win_rm_certificate_thumbprint = u'' - - -class ConfigurationSets(WindowsAzureData): - - def __init__(self): - self.configuration_sets = _list_of(ConfigurationSet) - - def __iter__(self): - return iter(self.configuration_sets) - - def __len__(self): - return len(self.configuration_sets) - - def __getitem__(self, index): - return self.configuration_sets[index] - - -class ConfigurationSet(WindowsAzureData): - - def __init__(self): - self.configuration_set_type = u'NetworkConfiguration' - self.role_type = u'' - self.input_endpoints = ConfigurationSetInputEndpoints() - self.subnet_names = _scalar_list_of(str, 'SubnetName') - - -class ConfigurationSetInputEndpoints(WindowsAzureData): - - def __init__(self): - self.input_endpoints = _list_of( - ConfigurationSetInputEndpoint, 'InputEndpoint') - - def __iter__(self): - return iter(self.input_endpoints) - - def __len__(self): - return len(self.input_endpoints) - - def __getitem__(self, index): - return 
self.input_endpoints[index] - - -class ConfigurationSetInputEndpoint(WindowsAzureData): - - ''' - Initializes a network configuration input endpoint. - - name: Specifies the name for the external endpoint. - protocol: - Specifies the protocol to use to inspect the virtual machine - availability status. Possible values are: HTTP, TCP. - port: Specifies the external port to use for the endpoint. - local_port: - Specifies the internal port on which the virtual machine is listening - to serve the endpoint. - load_balanced_endpoint_set_name: - Specifies a name for a set of load-balanced endpoints. Specifying this - element for a given endpoint adds it to the set. If you are setting an - endpoint to use to connect to the virtual machine via the Remote - Desktop, do not set this property. - enable_direct_server_return: - Specifies whether direct server return load balancing is enabled. - ''' - - def __init__(self, name=u'', protocol=u'', port=u'', local_port=u'', - load_balanced_endpoint_set_name=u'', - enable_direct_server_return=False): - self.enable_direct_server_return = enable_direct_server_return - self.load_balanced_endpoint_set_name = load_balanced_endpoint_set_name - self.local_port = local_port - self.name = name - self.port = port - self.load_balancer_probe = LoadBalancerProbe() - self.protocol = protocol - - -class WindowsConfigurationSet(WindowsAzureData): - - def __init__(self, computer_name=None, admin_password=None, - reset_password_on_first_logon=None, - enable_automatic_updates=None, time_zone=None, - admin_username=None): - self.configuration_set_type = u'WindowsProvisioningConfiguration' - self.computer_name = computer_name - self.admin_password = admin_password - self.admin_username = admin_username - self.reset_password_on_first_logon = reset_password_on_first_logon - self.enable_automatic_updates = enable_automatic_updates - self.time_zone = time_zone - self.domain_join = DomainJoin() - self.stored_certificate_settings = StoredCertificateSettings() - self.win_rm = WinRM() - - -class DomainJoin(WindowsAzureData): - - def __init__(self): - self.credentials = Credentials() - self.join_domain = u'' - self.machine_object_ou = u'' - - -class Credentials(WindowsAzureData): - - def __init__(self): - self.domain = u'' - self.username = u'' - self.password = u'' - - -class StoredCertificateSettings(WindowsAzureData): - - def __init__(self): - self.stored_certificate_settings = _list_of(CertificateSetting) - - def __iter__(self): - return iter(self.stored_certificate_settings) - - def __len__(self): - return len(self.stored_certificate_settings) - - def __getitem__(self, index): - return self.stored_certificate_settings[index] - - -class CertificateSetting(WindowsAzureData): - - ''' - Initializes a certificate setting. - - thumbprint: - Specifies the thumbprint of the certificate to be provisioned. The - thumbprint must specify an existing service certificate. - store_name: - Specifies the name of the certificate store from which retrieve - certificate. - store_location: - Specifies the target certificate store location on the virtual machine. - The only supported value is LocalMachine. - ''' - - def __init__(self, thumbprint=u'', store_name=u'', store_location=u''): - self.thumbprint = thumbprint - self.store_name = store_name - self.store_location = store_location - - -class WinRM(WindowsAzureData): - - ''' - Contains configuration settings for the Windows Remote Management service on - the Virtual Machine. 
- ''' - - def __init__(self): - self.listeners = Listeners() - - -class Listeners(WindowsAzureData): - - def __init__(self): - self.listeners = _list_of(Listener) - - def __iter__(self): - return iter(self.listeners) - - def __len__(self): - return len(self.listeners) - - def __getitem__(self, index): - return self.listeners[index] - - -class Listener(WindowsAzureData): - - ''' - Specifies the protocol and certificate information for the listener. - - protocol: - Specifies the protocol of listener. Possible values are: Http, Https. - The value is case sensitive. - certificate_thumbprint: - Optional. Specifies the certificate thumbprint for the secure - connection. If this value is not specified, a self-signed certificate is - generated and used for the Virtual Machine. - ''' - - def __init__(self, protocol=u'', certificate_thumbprint=u''): - self.protocol = protocol - self.certificate_thumbprint = certificate_thumbprint - - -class LinuxConfigurationSet(WindowsAzureData): - - def __init__(self, host_name=None, user_name=None, user_password=None, - disable_ssh_password_authentication=None): - self.configuration_set_type = u'LinuxProvisioningConfiguration' - self.host_name = host_name - self.user_name = user_name - self.user_password = user_password - self.disable_ssh_password_authentication =\ - disable_ssh_password_authentication - self.ssh = SSH() - - -class SSH(WindowsAzureData): - - def __init__(self): - self.public_keys = PublicKeys() - self.key_pairs = KeyPairs() - - -class PublicKeys(WindowsAzureData): - - def __init__(self): - self.public_keys = _list_of(PublicKey) - - def __iter__(self): - return iter(self.public_keys) - - def __len__(self): - return len(self.public_keys) - - def __getitem__(self, index): - return self.public_keys[index] - - -class PublicKey(WindowsAzureData): - - def __init__(self, fingerprint=u'', path=u''): - self.fingerprint = fingerprint - self.path = path - - -class KeyPairs(WindowsAzureData): - - def __init__(self): - self.key_pairs = _list_of(KeyPair) - - def __iter__(self): - return iter(self.key_pairs) - - def __len__(self): - return len(self.key_pairs) - - def __getitem__(self, index): - return self.key_pairs[index] - - -class KeyPair(WindowsAzureData): - - def __init__(self, fingerprint=u'', path=u''): - self.fingerprint = fingerprint - self.path = path - - -class LoadBalancerProbe(WindowsAzureData): - - def __init__(self): - self.path = u'' - self.port = u'' - self.protocol = u'' - - -class DataVirtualHardDisks(WindowsAzureData): - - def __init__(self): - self.data_virtual_hard_disks = _list_of(DataVirtualHardDisk) - - def __iter__(self): - return iter(self.data_virtual_hard_disks) - - def __len__(self): - return len(self.data_virtual_hard_disks) - - def __getitem__(self, index): - return self.data_virtual_hard_disks[index] - - -class DataVirtualHardDisk(WindowsAzureData): - - def __init__(self): - self.host_caching = u'' - self.disk_label = u'' - self.disk_name = u'' - self.lun = 0 - self.logical_disk_size_in_gb = 0 - self.media_link = u'' - - -class OSVirtualHardDisk(WindowsAzureData): - - def __init__(self, source_image_name=None, media_link=None, - host_caching=None, disk_label=None, disk_name=None): - self.source_image_name = source_image_name - self.media_link = media_link - self.host_caching = host_caching - self.disk_label = disk_label - self.disk_name = disk_name - self.os = u'' # undocumented, not used when adding a role - - -class AsynchronousOperationResult(WindowsAzureData): - - def __init__(self, request_id=None): - self.request_id = 
request_id - - -class ServiceBusRegion(WindowsAzureData): - - def __init__(self): - self.code = u'' - self.fullname = u'' - - -class ServiceBusNamespace(WindowsAzureData): - - def __init__(self): - self.name = u'' - self.region = u'' - self.default_key = u'' - self.status = u'' - self.created_at = u'' - self.acs_management_endpoint = u'' - self.servicebus_endpoint = u'' - self.connection_string = u'' - self.subscription_id = u'' - self.enabled = False - - -def _update_management_header(request): - ''' Add additional headers for management. ''' - - if request.method in ['PUT', 'POST', 'MERGE', 'DELETE']: - request.headers.append(('Content-Length', str(len(request.body)))) - - # append additional headers base on the service - request.headers.append(('x-ms-version', X_MS_VERSION)) - - # if it is not GET or HEAD request, must set content-type. - if not request.method in ['GET', 'HEAD']: - for name, _ in request.headers: - if 'content-type' == name.lower(): - break - else: - request.headers.append( - ('Content-Type', - 'application/atom+xml;type=entry;charset=utf-8')) - - return request.headers - - -def _parse_response_for_async_op(response): - ''' Extracts request id from response header. ''' - - if response is None: - return None - - result = AsynchronousOperationResult() - if response.headers: - for name, value in response.headers: - if name.lower() == 'x-ms-request-id': - result.request_id = value - - return result - - -def _management_error_handler(http_error): - ''' Simple error handler for management service. ''' - return _general_error_handler(http_error) - - -def _lower(text): - return text.lower() - - -class _XmlSerializer(object): - - @staticmethod - def create_storage_service_input_to_xml(service_name, description, label, - affinity_group, location, - geo_replication_enabled, - extended_properties): - return _XmlSerializer.doc_from_data( - 'CreateStorageServiceInput', - [('ServiceName', service_name), - ('Description', description), - ('Label', label, _encode_base64), - ('AffinityGroup', affinity_group), - ('Location', location), - ('GeoReplicationEnabled', geo_replication_enabled, _lower)], - extended_properties) - - @staticmethod - def update_storage_service_input_to_xml(description, label, - geo_replication_enabled, - extended_properties): - return _XmlSerializer.doc_from_data( - 'UpdateStorageServiceInput', - [('Description', description), - ('Label', label, _encode_base64), - ('GeoReplicationEnabled', geo_replication_enabled, _lower)], - extended_properties) - - @staticmethod - def regenerate_keys_to_xml(key_type): - return _XmlSerializer.doc_from_data('RegenerateKeys', - [('KeyType', key_type)]) - - @staticmethod - def update_hosted_service_to_xml(label, description, extended_properties): - return _XmlSerializer.doc_from_data('UpdateHostedService', - [('Label', label, _encode_base64), - ('Description', description)], - extended_properties) - - @staticmethod - def create_hosted_service_to_xml(service_name, label, description, - location, affinity_group, - extended_properties): - return _XmlSerializer.doc_from_data( - 'CreateHostedService', - [('ServiceName', service_name), - ('Label', label, _encode_base64), - ('Description', description), - ('Location', location), - ('AffinityGroup', affinity_group)], - extended_properties) - - @staticmethod - def create_deployment_to_xml(name, package_url, label, configuration, - start_deployment, treat_warnings_as_error, - extended_properties): - return _XmlSerializer.doc_from_data( - 'CreateDeployment', - [('Name', name), - ('PackageUrl', 
package_url), - ('Label', label, _encode_base64), - ('Configuration', configuration), - ('StartDeployment', - start_deployment, _lower), - ('TreatWarningsAsError', treat_warnings_as_error, _lower)], - extended_properties) - - @staticmethod - def swap_deployment_to_xml(production, source_deployment): - return _XmlSerializer.doc_from_data( - 'Swap', - [('Production', production), - ('SourceDeployment', source_deployment)]) - - @staticmethod - def update_deployment_status_to_xml(status): - return _XmlSerializer.doc_from_data( - 'UpdateDeploymentStatus', - [('Status', status)]) - - @staticmethod - def change_deployment_to_xml(configuration, treat_warnings_as_error, mode, - extended_properties): - return _XmlSerializer.doc_from_data( - 'ChangeConfiguration', - [('Configuration', configuration), - ('TreatWarningsAsError', treat_warnings_as_error, _lower), - ('Mode', mode)], - extended_properties) - - @staticmethod - def upgrade_deployment_to_xml(mode, package_url, configuration, label, - role_to_upgrade, force, extended_properties): - return _XmlSerializer.doc_from_data( - 'UpgradeDeployment', - [('Mode', mode), - ('PackageUrl', package_url), - ('Configuration', configuration), - ('Label', label, _encode_base64), - ('RoleToUpgrade', role_to_upgrade), - ('Force', force, _lower)], - extended_properties) - - @staticmethod - def rollback_upgrade_to_xml(mode, force): - return _XmlSerializer.doc_from_data( - 'RollbackUpdateOrUpgrade', - [('Mode', mode), - ('Force', force, _lower)]) - - @staticmethod - def walk_upgrade_domain_to_xml(upgrade_domain): - return _XmlSerializer.doc_from_data( - 'WalkUpgradeDomain', - [('UpgradeDomain', upgrade_domain)]) - - @staticmethod - def certificate_file_to_xml(data, certificate_format, password): - return _XmlSerializer.doc_from_data( - 'CertificateFile', - [('Data', data), - ('CertificateFormat', certificate_format), - ('Password', password)]) - - @staticmethod - def create_affinity_group_to_xml(name, label, description, location): - return _XmlSerializer.doc_from_data( - 'CreateAffinityGroup', - [('Name', name), - ('Label', label, _encode_base64), - ('Description', description), - ('Location', location)]) - - @staticmethod - def update_affinity_group_to_xml(label, description): - return _XmlSerializer.doc_from_data( - 'UpdateAffinityGroup', - [('Label', label, _encode_base64), - ('Description', description)]) - - @staticmethod - def subscription_certificate_to_xml(public_key, thumbprint, data): - return _XmlSerializer.doc_from_data( - 'SubscriptionCertificate', - [('SubscriptionCertificatePublicKey', public_key), - ('SubscriptionCertificateThumbprint', thumbprint), - ('SubscriptionCertificateData', data)]) - - @staticmethod - def os_image_to_xml(label, media_link, name, os): - return _XmlSerializer.doc_from_data( - 'OSImage', - [('Label', label), - ('MediaLink', media_link), - ('Name', name), - ('OS', os)]) - - @staticmethod - def data_virtual_hard_disk_to_xml(host_caching, disk_label, disk_name, lun, - logical_disk_size_in_gb, media_link, - source_media_link): - return _XmlSerializer.doc_from_data( - 'DataVirtualHardDisk', - [('HostCaching', host_caching), - ('DiskLabel', disk_label), - ('DiskName', disk_name), - ('Lun', lun), - ('LogicalDiskSizeInGB', logical_disk_size_in_gb), - ('MediaLink', media_link), - ('SourceMediaLink', source_media_link)]) - - @staticmethod - def disk_to_xml(has_operating_system, label, media_link, name, os): - return _XmlSerializer.doc_from_data( - 'Disk', - [('HasOperatingSystem', has_operating_system, _lower), - ('Label', label), - 
('MediaLink', media_link), - ('Name', name), - ('OS', os)]) - - @staticmethod - def restart_role_operation_to_xml(): - return _XmlSerializer.doc_from_xml( - 'RestartRoleOperation', - '<OperationType>RestartRoleOperation</OperationType>') - - @staticmethod - def shutdown_role_operation_to_xml(post_shutdown_action): - xml = _XmlSerializer.data_to_xml( - [('OperationType', 'ShutdownRoleOperation'), - ('PostShutdownAction', post_shutdown_action)]) - return _XmlSerializer.doc_from_xml('ShutdownRoleOperation', xml) - - @staticmethod - def shutdown_roles_operation_to_xml(role_names, post_shutdown_action): - xml = _XmlSerializer.data_to_xml( - [('OperationType', 'ShutdownRolesOperation')]) - xml += '<Roles>' - for role_name in role_names: - xml += _XmlSerializer.data_to_xml([('Name', role_name)]) - xml += '</Roles>' - xml += _XmlSerializer.data_to_xml( - [('PostShutdownAction', post_shutdown_action)]) - return _XmlSerializer.doc_from_xml('ShutdownRolesOperation', xml) - - @staticmethod - def start_role_operation_to_xml(): - return _XmlSerializer.doc_from_xml( - 'StartRoleOperation', - '<OperationType>StartRoleOperation</OperationType>') - - @staticmethod - def start_roles_operation_to_xml(role_names): - xml = _XmlSerializer.data_to_xml( - [('OperationType', 'StartRolesOperation')]) - xml += '<Roles>' - for role_name in role_names: - xml += _XmlSerializer.data_to_xml([('Name', role_name)]) - xml += '</Roles>' - return _XmlSerializer.doc_from_xml('StartRolesOperation', xml) - - @staticmethod - def windows_configuration_to_xml(configuration): - xml = _XmlSerializer.data_to_xml( - [('ConfigurationSetType', configuration.configuration_set_type), - ('ComputerName', configuration.computer_name), - ('AdminPassword', configuration.admin_password), - ('ResetPasswordOnFirstLogon', - configuration.reset_password_on_first_logon, - _lower), - ('EnableAutomaticUpdates', - configuration.enable_automatic_updates, - _lower), - ('TimeZone', configuration.time_zone)]) - - if configuration.domain_join is not None: - xml += '<DomainJoin>' - xml += '<Credentials>' - xml += _XmlSerializer.data_to_xml( - [('Domain', configuration.domain_join.credentials.domain), - ('Username', configuration.domain_join.credentials.username), - ('Password', configuration.domain_join.credentials.password)]) - xml += '</Credentials>' - xml += _XmlSerializer.data_to_xml( - [('JoinDomain', configuration.domain_join.join_domain), - ('MachineObjectOU', - configuration.domain_join.machine_object_ou)]) - xml += '</DomainJoin>' - if configuration.stored_certificate_settings is not None: - xml += '<StoredCertificateSettings>' - for cert in configuration.stored_certificate_settings: - xml += '<CertificateSetting>' - xml += _XmlSerializer.data_to_xml( - [('StoreLocation', cert.store_location), - ('StoreName', cert.store_name), - ('Thumbprint', cert.thumbprint)]) - xml += '</CertificateSetting>' - xml += '</StoredCertificateSettings>' - if configuration.win_rm is not None: - xml += '<WinRM><Listeners>' - for listener in configuration.win_rm.listeners: - xml += '<Listener>' - xml += _XmlSerializer.data_to_xml( - [('Protocol', listener.protocol), - ('CertificateThumbprint', listener.certificate_thumbprint)]) - xml += '</Listener>' - xml += '</Listeners></WinRM>' - xml += _XmlSerializer.data_to_xml( - [('AdminUsername', configuration.admin_username)]) - return xml - - @staticmethod - def linux_configuration_to_xml(configuration): - xml = _XmlSerializer.data_to_xml( - [('ConfigurationSetType', configuration.configuration_set_type), - ('HostName', 
configuration.host_name), - ('UserName', configuration.user_name), - ('UserPassword', configuration.user_password), - ('DisableSshPasswordAuthentication', - configuration.disable_ssh_password_authentication, - _lower)]) - - if configuration.ssh is not None: - xml += '<SSH>' - xml += '<PublicKeys>' - for key in configuration.ssh.public_keys: - xml += '<PublicKey>' - xml += _XmlSerializer.data_to_xml( - [('Fingerprint', key.fingerprint), - ('Path', key.path)]) - xml += '</PublicKey>' - xml += '</PublicKeys>' - xml += '<KeyPairs>' - for key in configuration.ssh.key_pairs: - xml += '<KeyPair>' - xml += _XmlSerializer.data_to_xml( - [('Fingerprint', key.fingerprint), - ('Path', key.path)]) - xml += '</KeyPair>' - xml += '</KeyPairs>' - xml += '</SSH>' - return xml - - @staticmethod - def network_configuration_to_xml(configuration): - xml = _XmlSerializer.data_to_xml( - [('ConfigurationSetType', configuration.configuration_set_type)]) - xml += '<InputEndpoints>' - for endpoint in configuration.input_endpoints: - xml += '<InputEndpoint>' - xml += _XmlSerializer.data_to_xml( - [('LoadBalancedEndpointSetName', - endpoint.load_balanced_endpoint_set_name), - ('LocalPort', endpoint.local_port), - ('Name', endpoint.name), - ('Port', endpoint.port)]) - - if endpoint.load_balancer_probe.path or\ - endpoint.load_balancer_probe.port or\ - endpoint.load_balancer_probe.protocol: - xml += '<LoadBalancerProbe>' - xml += _XmlSerializer.data_to_xml( - [('Path', endpoint.load_balancer_probe.path), - ('Port', endpoint.load_balancer_probe.port), - ('Protocol', endpoint.load_balancer_probe.protocol)]) - xml += '</LoadBalancerProbe>' - - xml += _XmlSerializer.data_to_xml( - [('Protocol', endpoint.protocol), - ('EnableDirectServerReturn', - endpoint.enable_direct_server_return, - _lower)]) - - xml += '</InputEndpoint>' - xml += '</InputEndpoints>' - xml += '<SubnetNames>' - for name in configuration.subnet_names: - xml += _XmlSerializer.data_to_xml([('SubnetName', name)]) - xml += '</SubnetNames>' - return xml - - @staticmethod - def role_to_xml(availability_set_name, data_virtual_hard_disks, - network_configuration_set, os_virtual_hard_disk, role_name, - role_size, role_type, system_configuration_set): - xml = _XmlSerializer.data_to_xml([('RoleName', role_name), - ('RoleType', role_type)]) - - xml += '<ConfigurationSets>' - - if system_configuration_set is not None: - xml += '<ConfigurationSet>' - if isinstance(system_configuration_set, WindowsConfigurationSet): - xml += _XmlSerializer.windows_configuration_to_xml( - system_configuration_set) - elif isinstance(system_configuration_set, LinuxConfigurationSet): - xml += _XmlSerializer.linux_configuration_to_xml( - system_configuration_set) - xml += '</ConfigurationSet>' - - if network_configuration_set is not None: - xml += '<ConfigurationSet>' - xml += _XmlSerializer.network_configuration_to_xml( - network_configuration_set) - xml += '</ConfigurationSet>' - - xml += '</ConfigurationSets>' - - if availability_set_name is not None: - xml += _XmlSerializer.data_to_xml( - [('AvailabilitySetName', availability_set_name)]) - - if data_virtual_hard_disks is not None: - xml += '<DataVirtualHardDisks>' - for hd in data_virtual_hard_disks: - xml += '<DataVirtualHardDisk>' - xml += _XmlSerializer.data_to_xml( - [('HostCaching', hd.host_caching), - ('DiskLabel', hd.disk_label), - ('DiskName', hd.disk_name), - ('Lun', hd.lun), - ('LogicalDiskSizeInGB', hd.logical_disk_size_in_gb), - ('MediaLink', hd.media_link)]) - xml += '</DataVirtualHardDisk>' - xml += '</DataVirtualHardDisks>' 
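# Note on the convention used throughout this class: every *_to_xml method
# above and below feeds data_to_xml a list of (element_name, value[, converter])
# tuples, and None values are skipped rather than emitted as empty elements.
# A minimal sketch of that contract (the disk values here are hypothetical):
#
#     fragment = _XmlSerializer.data_to_xml(
#         [('HostCaching', 'ReadOnly'),
#          ('DiskLabel', None),        # None: element omitted entirely
#          ('Lun', 0),
#          ('Force', True, _lower)])   # converter applied to _str(value)
#     # fragment == '<HostCaching>ReadOnly</HostCaching>'
#     #             '<Lun>0</Lun><Force>true</Force>'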
- - if os_virtual_hard_disk is not None: - xml += '<OSVirtualHardDisk>' - xml += _XmlSerializer.data_to_xml( - [('HostCaching', os_virtual_hard_disk.host_caching), - ('DiskLabel', os_virtual_hard_disk.disk_label), - ('DiskName', os_virtual_hard_disk.disk_name), - ('MediaLink', os_virtual_hard_disk.media_link), - ('SourceImageName', os_virtual_hard_disk.source_image_name)]) - xml += '</OSVirtualHardDisk>' - - if role_size is not None: - xml += _XmlSerializer.data_to_xml([('RoleSize', role_size)]) - - return xml - - @staticmethod - def add_role_to_xml(role_name, system_configuration_set, - os_virtual_hard_disk, role_type, - network_configuration_set, availability_set_name, - data_virtual_hard_disks, role_size): - xml = _XmlSerializer.role_to_xml( - availability_set_name, - data_virtual_hard_disks, - network_configuration_set, - os_virtual_hard_disk, - role_name, - role_size, - role_type, - system_configuration_set) - return _XmlSerializer.doc_from_xml('PersistentVMRole', xml) - - @staticmethod - def update_role_to_xml(role_name, os_virtual_hard_disk, role_type, - network_configuration_set, availability_set_name, - data_virtual_hard_disks, role_size): - xml = _XmlSerializer.role_to_xml( - availability_set_name, - data_virtual_hard_disks, - network_configuration_set, - os_virtual_hard_disk, - role_name, - role_size, - role_type, - None) - return _XmlSerializer.doc_from_xml('PersistentVMRole', xml) - - @staticmethod - def capture_role_to_xml(post_capture_action, target_image_name, - target_image_label, provisioning_configuration): - xml = _XmlSerializer.data_to_xml( - [('OperationType', 'CaptureRoleOperation'), - ('PostCaptureAction', post_capture_action)]) - - if provisioning_configuration is not None: - xml += '<ProvisioningConfiguration>' - if isinstance(provisioning_configuration, WindowsConfigurationSet): - xml += _XmlSerializer.windows_configuration_to_xml( - provisioning_configuration) - elif isinstance(provisioning_configuration, LinuxConfigurationSet): - xml += _XmlSerializer.linux_configuration_to_xml( - provisioning_configuration) - xml += '</ProvisioningConfiguration>' - - xml += _XmlSerializer.data_to_xml( - [('TargetImageLabel', target_image_label), - ('TargetImageName', target_image_name)]) - - return _XmlSerializer.doc_from_xml('CaptureRoleOperation', xml) - - @staticmethod - def virtual_machine_deployment_to_xml(deployment_name, deployment_slot, - label, role_name, - system_configuration_set, - os_virtual_hard_disk, role_type, - network_configuration_set, - availability_set_name, - data_virtual_hard_disks, role_size, - virtual_network_name): - xml = _XmlSerializer.data_to_xml([('Name', deployment_name), - ('DeploymentSlot', deployment_slot), - ('Label', label)]) - xml += '<RoleList>' - xml += '<Role>' - xml += _XmlSerializer.role_to_xml( - availability_set_name, - data_virtual_hard_disks, - network_configuration_set, - os_virtual_hard_disk, - role_name, - role_size, - role_type, - system_configuration_set) - xml += '</Role>' - xml += '</RoleList>' - - if virtual_network_name is not None: - xml += _XmlSerializer.data_to_xml( - [('VirtualNetworkName', virtual_network_name)]) - - return _XmlSerializer.doc_from_xml('Deployment', xml) - - @staticmethod - def data_to_xml(data): - '''Creates an xml fragment from the specified data. 
-        data: Array of tuples, where first: xml element name
-                second: xml element text
-                third: conversion function
-        '''
-        xml = ''
-        for element in data:
-            name = element[0]
-            val = element[1]
-            if len(element) > 2:
-                converter = element[2]
-            else:
-                converter = None
-
-            if val is not None:
-                if converter is not None:
-                    text = _str(converter(_str(val)))
-                else:
-                    text = _str(val)
-
-                xml += ''.join(['<', name, '>', text, '</', name, '>'])
-        return xml
-
-    @staticmethod
-    def doc_from_xml(document_element_name, inner_xml):
-        '''Wraps the specified xml in an xml root element with default azure
-        namespaces'''
-        xml = ''.join(['<', document_element_name,
-                       ' xmlns:i="http://www.w3.org/2001/XMLSchema-instance"',
-                       ' xmlns="http://schemas.microsoft.com/windowsazure">'])
-        xml += inner_xml
-        xml += ''.join(['</', document_element_name, '>'])
-        return xml
-
-    @staticmethod
-    def doc_from_data(document_element_name, data, extended_properties=None):
-        xml = _XmlSerializer.data_to_xml(data)
-        if extended_properties is not None:
-            xml += _XmlSerializer.extended_properties_dict_to_xml_fragment(
-                extended_properties)
-        return _XmlSerializer.doc_from_xml(document_element_name, xml)
-
-    @staticmethod
-    def extended_properties_dict_to_xml_fragment(extended_properties):
-        xml = ''
-        if extended_properties is not None and len(extended_properties) > 0:
-            xml += '<ExtendedProperties>'
-            for key, val in extended_properties.items():
-                xml += ''.join(['<ExtendedProperty>',
-                                '<Name>',
-                                _str(key),
-                                '</Name>',
-                                '<Value>',
-                                _str(val),
-                                '</Value>',
-                                '</ExtendedProperty>'])
-            xml += '</ExtendedProperties>'
-        return xml
-
-
-def _parse_bool(value):
-    if value.lower() == 'true':
-        return True
-    return False
-
-
-class _ServiceBusManagementXmlSerializer(object):
-
-    @staticmethod
-    def namespace_to_xml(region):
-        '''Converts a service bus namespace description to xml
-
-        The xml format:
-<?xml version="1.0" encoding="utf-8" standalone="yes"?>
-<entry xmlns="http://www.w3.org/2005/Atom">
-    <content type="application/xml">
-        <NamespaceDescription
-            xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">
-            <Region>West US</Region>
-        </NamespaceDescription>
-    </content>
-</entry>
-        '''
-        body = '<NamespaceDescription xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">'
-        body += ''.join(['<Region>', region, '</Region>'])
-        body += '</NamespaceDescription>'
-
-        return _create_entry(body)
-
-    @staticmethod
-    def xml_to_namespace(xmlstr):
-        '''Converts xml response to service bus namespace
-
-        The xml format for namespace:
-<entry>
-<id>uuid:00000000-0000-0000-0000-000000000000;id=0000000</id>
-<title type="text">myunittests</title>
-<updated>2012-08-22T16:48:10Z</updated>
-<content type="application/xml">
-    <NamespaceDescription
-        xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">
-        <Name>myunittests</Name>
-        <Region>West US</Region>
-        <DefaultKey>0000000000000000000000000000000000000000000=</DefaultKey>
-        <Status>Active</Status>
-        <CreatedAt>2012-08-22T16:48:10.217Z</CreatedAt>
-        <AcsManagementEndpoint>https://myunittests-sb.accesscontrol.windows.net/</AcsManagementEndpoint>
-        <ServiceBusEndpoint>https://myunittests.servicebus.windows.net/</ServiceBusEndpoint>
-        <ConnectionString>Endpoint=sb://myunittests.servicebus.windows.net/;SharedSecretIssuer=owner;SharedSecretValue=0000000000000000000000000000000000000000000=</ConnectionString>
-        <SubscriptionId>00000000000000000000000000000000</SubscriptionId>
-        <Enabled>true</Enabled>
-    </NamespaceDescription>
-</content>
-</entry>
-        '''
-        xmldoc = minidom.parseString(xmlstr)
-        namespace = ServiceBusNamespace()
-
-        mappings = (
-            ('Name', 'name', None),
-            ('Region', 'region', None),
-            ('DefaultKey', 'default_key', None),
-            ('Status', 'status', None),
-            ('CreatedAt', 'created_at', None),
-            ('AcsManagementEndpoint', 'acs_management_endpoint', None),
-            ('ServiceBusEndpoint', 'servicebus_endpoint', None),
-            ('ConnectionString', 'connection_string', None),
-            ('SubscriptionId', 'subscription_id', None),
-            ('Enabled', 'enabled', _parse_bool),
-        )
-
-        for desc in _get_children_from_path(xmldoc,
-                                            'entry',
-                                            'content',
-                                            'NamespaceDescription'):
-            for xml_name, field_name, conversion_func in mappings:
-                node_value = _get_first_child_node_value(desc, xml_name)
-                if node_value is not None:
-                    if conversion_func is not None:
-                        node_value = conversion_func(node_value)
-                    setattr(namespace, field_name, node_value)
-
-        return namespace
-
-    @staticmethod
-    def xml_to_region(xmlstr):
-        '''Converts xml response to service bus region
-
-        The xml format for region:
-<entry>
-<id>uuid:157c311f-081f-4b4a-a0ba-a8f990ffd2a3;id=1756759</id>
-<title type="text"></title>
-<updated>2013-04-10T18:25:29Z</updated>
-<content type="application/xml">
-    <RegionCodeDescription
-        xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">
-        <Code>East Asia</Code>
-        <FullName>East Asia</FullName>
-    </RegionCodeDescription>
-</content>
-</entry>
-        '''
-        xmldoc = minidom.parseString(xmlstr)
-        region = ServiceBusRegion()
-
-        for desc in _get_children_from_path(xmldoc, 'entry', 'content',
-                                            'RegionCodeDescription'):
-            node_value = _get_first_child_node_value(desc, 'Code')
-            if node_value is not None:
-                region.code = node_value
-            node_value = _get_first_child_node_value(desc, 'FullName')
-            if node_value is not None:
-                region.fullname = node_value
-
-        return region
-
-    @staticmethod
-    def xml_to_namespace_availability(xmlstr):
-        '''Converts xml response to service bus namespace availability
-
-        The xml format:
-<entry xmlns="http://www.w3.org/2005/Atom">
-    <id>uuid:9fc7c652-1856-47ab-8d74-cd31502ea8e6;id=3683292</id>
-    <title type="text"></title>
-    <updated>2013-04-16T03:03:37Z</updated>
-    <content type="application/xml">
-        <NamespaceAvailability
-            xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">
-            <Result>false</Result>
-        </NamespaceAvailability>
-    </content>
-</entry>
-        '''
-        xmldoc = minidom.parseString(xmlstr)
-        availability = AvailabilityResponse()
-
-        for desc in _get_children_from_path(xmldoc, 'entry', 'content',
-                                            'NamespaceAvailability'):
-            node_value = _get_first_child_node_value(desc, 'Result')
-            if node_value is not None:
-                availability.result = _parse_bool(node_value)
-
-        return availability
-
-from azure.servicemanagement.servicemanagementservice import (
-    ServiceManagementService)
-from azure.servicemanagement.servicebusmanagementservice import (
-    ServiceBusManagementService)
+#-------------------------------------------------------------------------
+# Copyright (c) Microsoft. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#--------------------------------------------------------------------------
+from xml.dom import minidom
+from azure import (
+    WindowsAzureData,
+    _Base64String,
+    _create_entry,
+    _dict_of,
+    _encode_base64,
+    _general_error_handler,
+    _get_children_from_path,
+    _get_first_child_node_value,
+    _list_of,
+    _scalar_list_of,
+    _str,
+    _xml_attribute,
+    _get_entry_properties_from_node,
+    _get_child_nodes,
+    _get_serialization_name,
+    )
+
+#-----------------------------------------------------------------------------
+# Constants for Azure app environment settings.
+AZURE_MANAGEMENT_CERTFILE = 'AZURE_MANAGEMENT_CERTFILE'
+AZURE_MANAGEMENT_SUBSCRIPTIONID = 'AZURE_MANAGEMENT_SUBSCRIPTIONID'
+
+# x-ms-version for service management.
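# (Continuing the note above: as in the removed module's
# _update_management_header, this constant is attached to every management
# request as a header, roughly:
#
#     request.headers.append(('x-ms-version', X_MS_VERSION))
#
# where `request` is hypothetical here. Bumping the value to 2014-06-01
# selects the newer service management API version.)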
+X_MS_VERSION = '2014-06-01' + +#----------------------------------------------------------------------------- +# Data classes + + +class StorageServices(WindowsAzureData): + + def __init__(self): + self.storage_services = _list_of(StorageService) + + def __iter__(self): + return iter(self.storage_services) + + def __len__(self): + return len(self.storage_services) + + def __getitem__(self, index): + return self.storage_services[index] + + +class StorageService(WindowsAzureData): + + def __init__(self): + self.url = '' + self.service_name = '' + self.storage_service_properties = StorageAccountProperties() + self.storage_service_keys = StorageServiceKeys() + self.extended_properties = _dict_of( + 'ExtendedProperty', 'Name', 'Value') + self.capabilities = _scalar_list_of(str, 'Capability') + + +class StorageAccountProperties(WindowsAzureData): + + def __init__(self): + self.description = u'' + self.affinity_group = u'' + self.location = u'' + self.label = _Base64String() + self.status = u'' + self.endpoints = _scalar_list_of(str, 'Endpoint') + self.geo_replication_enabled = False + self.geo_primary_region = u'' + self.status_of_primary = u'' + self.geo_secondary_region = u'' + self.status_of_secondary = u'' + self.last_geo_failover_time = u'' + self.creation_time = u'' + self.account_type = u'' + + +class StorageServiceKeys(WindowsAzureData): + + def __init__(self): + self.primary = u'' + self.secondary = u'' + + +class Locations(WindowsAzureData): + + def __init__(self): + self.locations = _list_of(Location) + + def __iter__(self): + return iter(self.locations) + + def __len__(self): + return len(self.locations) + + def __getitem__(self, index): + return self.locations[index] + + +class Location(WindowsAzureData): + + def __init__(self): + self.name = u'' + self.display_name = u'' + self.available_services = _scalar_list_of(str, 'AvailableService') + self.compute_capabilities = ComputeCapabilities() + + +class ComputeCapabilities(WindowsAzureData): + + def __init__(self): + self.web_worker_role_sizes = _scalar_list_of(str, 'RoleSize') + self.virtual_machines_role_sizes = _scalar_list_of(str, 'RoleSize') + + +class AffinityGroup(WindowsAzureData): + + def __init__(self): + self.name = '' + self.label = _Base64String() + self.description = u'' + self.location = u'' + self.hosted_services = HostedServices() + self.storage_services = StorageServices() + self.capabilities = _scalar_list_of(str, 'Capability') + + +class AffinityGroups(WindowsAzureData): + + def __init__(self): + self.affinity_groups = _list_of(AffinityGroup) + + def __iter__(self): + return iter(self.affinity_groups) + + def __len__(self): + return len(self.affinity_groups) + + def __getitem__(self, index): + return self.affinity_groups[index] + + +class HostedServices(WindowsAzureData): + + def __init__(self): + self.hosted_services = _list_of(HostedService) + + def __iter__(self): + return iter(self.hosted_services) + + def __len__(self): + return len(self.hosted_services) + + def __getitem__(self, index): + return self.hosted_services[index] + + +class HostedService(WindowsAzureData): + + def __init__(self): + self.url = u'' + self.service_name = u'' + self.hosted_service_properties = HostedServiceProperties() + self.deployments = Deployments() + + +class HostedServiceProperties(WindowsAzureData): + + def __init__(self): + self.description = u'' + self.location = u'' + self.affinity_group = u'' + self.label = _Base64String() + self.status = u'' + self.date_created = u'' + self.date_last_modified = u'' + 
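# Note on the collection wrappers in this module (StorageServices,
# Locations, HostedServices, Deployments, and the rest): each pairs a
# _list_of(...) typed container with __iter__/__len__/__getitem__, so a
# deserialized result can be consumed like a plain list. Hypothetical
# usage, assuming a ServiceManagementService client `sms`:
#
#     services = sms.list_hosted_services()    # -> HostedServices
#     for svc in services:                     # __iter__
#         print(svc.service_name,
#               svc.hosted_service_properties.location)
#     first = services[0]                      # __getitem__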
self.extended_properties = _dict_of( + 'ExtendedProperty', 'Name', 'Value') + + +class VirtualNetworkSites(WindowsAzureData): + + def __init__(self): + self.virtual_network_sites = _list_of(VirtualNetworkSite) + + def __iter__(self): + return iter(self.virtual_network_sites) + + def __len__(self): + return len(self.virtual_network_sites) + + def __getitem__(self, index): + return self.virtual_network_sites[index] + + +class VirtualNetworkSite(WindowsAzureData): + + def __init__(self): + self.name = u'' + self.id = u'' + self.affinity_group = u'' + self.subnets = Subnets() + + +class Subnets(WindowsAzureData): + + def __init__(self): + self.subnets = _list_of(Subnet) + + def __iter__(self): + return iter(self.subnets) + + def __len__(self): + return len(self.subnets) + + def __getitem__(self, index): + return self.subnets[index] + + +class Subnet(WindowsAzureData): + + def __init__(self): + self.name = u'' + self.address_prefix = u'' + + + +class Deployments(WindowsAzureData): + + def __init__(self): + self.deployments = _list_of(Deployment) + + def __iter__(self): + return iter(self.deployments) + + def __len__(self): + return len(self.deployments) + + def __getitem__(self, index): + return self.deployments[index] + + +class Deployment(WindowsAzureData): + + def __init__(self): + self.name = u'' + self.deployment_slot = u'' + self.private_id = u'' + self.status = u'' + self.label = _Base64String() + self.url = u'' + self.configuration = _Base64String() + self.role_instance_list = RoleInstanceList() + self.upgrade_status = UpgradeStatus() + self.upgrade_domain_count = u'' + self.role_list = RoleList() + self.sdk_version = u'' + self.input_endpoint_list = InputEndpoints() + self.locked = False + self.rollback_allowed = False + self.persistent_vm_downtime_info = PersistentVMDowntimeInfo() + self.created_time = u'' + self.virtual_network_name = u'' + self.last_modified_time = u'' + self.extended_properties = _dict_of( + 'ExtendedProperty', 'Name', 'Value') + + +class RoleInstanceList(WindowsAzureData): + + def __init__(self): + self.role_instances = _list_of(RoleInstance) + + def __iter__(self): + return iter(self.role_instances) + + def __len__(self): + return len(self.role_instances) + + def __getitem__(self, index): + return self.role_instances[index] + + +class RoleInstance(WindowsAzureData): + + def __init__(self): + self.role_name = u'' + self.instance_name = u'' + self.instance_status = u'' + self.instance_upgrade_domain = 0 + self.instance_fault_domain = 0 + self.instance_size = u'' + self.instance_state_details = u'' + self.instance_error_code = u'' + self.ip_address = u'' + self.instance_endpoints = InstanceEndpoints() + self.power_state = u'' + self.fqdn = u'' + self.host_name = u'' + self.public_ips = PublicIPs() + +class InstanceEndpoints(WindowsAzureData): + + def __init__(self): + self.instance_endpoints = _list_of(InstanceEndpoint) + + def __iter__(self): + return iter(self.instance_endpoints) + + def __len__(self): + return len(self.instance_endpoints) + + def __getitem__(self, index): + return self.instance_endpoints[index] + + +class InstanceEndpoint(WindowsAzureData): + + def __init__(self): + self.name = u'' + self.vip = u'' + self.public_port = u'' + self.local_port = u'' + self.protocol = u'' + + +class UpgradeStatus(WindowsAzureData): + + def __init__(self): + self.upgrade_type = u'' + self.current_upgrade_domain_state = u'' + self.current_upgrade_domain = u'' + + +class InputEndpoints(WindowsAzureData): + + def __init__(self): + self.input_endpoints = 
_list_of(InputEndpoint) + + def __iter__(self): + return iter(self.input_endpoints) + + def __len__(self): + return len(self.input_endpoints) + + def __getitem__(self, index): + return self.input_endpoints[index] + + +class InputEndpoint(WindowsAzureData): + + def __init__(self): + self.role_name = u'' + self.vip = u'' + self.port = u'' + + +class RoleList(WindowsAzureData): + + def __init__(self): + self.roles = _list_of(Role) + + def __iter__(self): + return iter(self.roles) + + def __len__(self): + return len(self.roles) + + def __getitem__(self, index): + return self.roles[index] + + +class Role(WindowsAzureData): + + def __init__(self): + self.role_name = u'' + self.role_type = u'' + self.os_version = u'' + self.configuration_sets = ConfigurationSets() + self.availability_set_name = u'' + self.data_virtual_hard_disks = DataVirtualHardDisks() + self.os_virtual_hard_disk = OSVirtualHardDisk() + self.role_size = u'' + self.default_win_rm_certificate_thumbprint = u'' + + +class CaptureRoleAsVMImage: + + def __init__(self, os_state=None, vm_image_name=None, vm_image_label=None, + description=None, language=None, image_family=None, + recommended_vm_size=None): + self.os_state = os_state + self.vm_image_name = vm_image_name + self.vm_image_label = vm_image_label + self.description = description + self.language = language + self.image_family = image_family + self.recommended_vm_size = recommended_vm_size + + +class OSDiskConfiguration(WindowsAzureData): + + def __init__(self): + self.name = None + self.host_caching = None + self.os_state = None + self.os = None + self.media_link = None + self.logical_disk_size_in_gb = -1 + + +class DataDiskConfigurations(WindowsAzureData): + + def __init__(self): + self.data_disk_configurations = _list_of(DataDiskConfiguration) + + def __iter__(self): + return iter(self.data_disk_configurations) + + def __len__(self): + return len(self.data_disk_configurations) + + def __getitem__(self, index): + return self.data_disk_configurations[index] + + +class DataDiskConfiguration(WindowsAzureData): + + def __init__(self): + self.name = None + self.host_caching = None + self.lun = -1 + self.media_link = None + self.logical_disk_size_in_gb = -1 + + +class VMImages(WindowsAzureData): + + def __init__(self): + self.vm_images = _list_of(VMImage) + + def __iter__(self): + return iter(self.vm_images) + + def __len__(self): + return len(self.vm_images) + + def __getitem__(self, index): + return self.vm_images[index] + + +class VMImage(WindowsAzureData): + + def __init__(self, name=None, label=None, description=None): + self.name = name + self.label = label + self.category = None # read-only + self.description = description + self.os_disk_configuration = OSDiskConfiguration() + self.data_disk_configurations = DataDiskConfigurations() + self.service_name = None # read-only + self.deployment_name = None # read-only + self.role_name = None # read-only + self.location = None # read-only + self.affinity_group = None # read-only + self.created_time = None # read-only + self.modified_time = None # read-only + self.language = None + self.image_family = None + self.recommended_vm_size = None + self.is_premium = False # read-only + self.eula = None + self.icon_uri = None + self.small_icon_uri = None + self.privacy_uri = None + self.publisher_name = None # read-only + self.published_date = None + self.show_in_gui = False + self.pricing_detail_link = None # read-only + + +class ResourceExtensions(WindowsAzureData): + + def __init__(self): + self.resource_extensions = 
_list_of(ResourceExtension) + + def __iter__(self): + return iter(self.resource_extensions) + + def __len__(self): + return len(self.resource_extensions) + + def __getitem__(self, index): + return self.resource_extensions[index] + + +class ResourceExtension(WindowsAzureData): + + def __init__(self): + self.publisher = u'' + self.name = u'' + self.version = u'' + self.label = u'' + self.description = u'' + self.public_configuration_schema = u'' + self.private_configuration_schema = u'' + self.sample_config = u'' + self.replication_completed = False + self.eula = u'' + self.privacy_uri = u'' + self.homepage_uri = u'' + self.is_json_extension = False + self.is_internal_extension = False + self.disallow_major_version_upgrade = False + self.company_name = u'' + self.supported_os = u'' + self.published_date = u'' + + +class ResourceExtensionParameterValues(WindowsAzureData): + + def __init__(self): + self.resource_extension_parameter_values = _list_of(ResourceExtensionParameterValue) + + def __iter__(self): + return iter(self.resource_extension_parameter_values) + + def __len__(self): + return len(self.resource_extension_parameter_values) + + def __getitem__(self, index): + return self.resource_extension_parameter_values[index] + + +class ResourceExtensionParameterValue(WindowsAzureData): + + def __init__(self): + self.key = u'' + self.value = u'' + self.type = u'' + + +class ResourceExtensionReferences(WindowsAzureData): + + def __init__(self): + self.resource_extension_references = _list_of(ResourceExtensionReference) + + def __iter__(self): + return iter(self.resource_extension_references) + + def __len__(self): + return len(self.resource_extension_references) + + def __getitem__(self, index): + return self.resource_extension_references[index] + + +class ResourceExtensionReference(WindowsAzureData): + + def __init__(self, reference_name=u'', publisher=u'', name=u'', version=u''): + self.reference_name = reference_name + self.publisher = publisher + self.name = name + self.version = version + self.resource_extension_parameter_values = ResourceExtensionParameterValues() + self.state = u'' + self.certificates = Certificates() + + +class AdditionalUnattendContent(WindowsAzureData): + + def __init__(self): + self.passes = Passes() + + +class Passes(WindowsAzureData): + + def __init__(self): + self.passes = _list_of(UnattendPass) + + def __iter__(self): + return iter(self.passes) + + def __len__(self): + return len(self.passes) + + def __getitem__(self, index): + return self.passes[index] + + +class UnattendPass(WindowsAzureData): + + def __init__(self): + self.pass_name = u'' + self.components = Components() + + +class Components(WindowsAzureData): + + def __init__(self): + self.components = _list_of(UnattendComponent) + + def __iter__(self): + return iter(self.components) + + def __len__(self): + return len(self.components) + + def __getitem__(self, index): + return self.components[index] + + +class UnattendComponent(WindowsAzureData): + + def __init__(self): + self.component_name = u'' + self.component_settings = ComponentSettings() + + +class ComponentSettings(WindowsAzureData): + + def __init__(self): + self.component_settings = _list_of(ComponentSetting) + + def __iter__(self): + return iter(self.component_settings) + + def __len__(self): + return len(self.component_settings) + + def __getitem__(self, index): + return self.component_settings[index] + + +class ComponentSetting(WindowsAzureData): + + def __init__(self): + self.setting_name = u'' + self.content = u'' + + +class 
DnsServer(WindowsAzureData): + + def __init__(self): + self.name = u'' + self.address = u'' + + +class ReservedIPs(WindowsAzureData): + + def __init__(self): + self.reserved_ips = _list_of(ReservedIP) + + def __iter__(self): + return iter(self.reserved_ips) + + def __len__(self): + return len(self.reserved_ips) + + def __getitem__(self, index): + return self.reserved_ips[index] + + +class ReservedIP(WindowsAzureData): + + def __init__(self): + self.name = u'' + self.address = u'' + self.id = u'' + self.label = u'' + self.state = u'' + self.in_use = False + self.service_name = u'' + self.deployment_name = u'' + self.location = u'' + + +class PersistentVMDowntimeInfo(WindowsAzureData): + + def __init__(self): + self.start_time = u'' + self.end_time = u'' + self.status = u'' + + +class Certificates(WindowsAzureData): + + def __init__(self): + self.certificates = _list_of(Certificate) + + def __iter__(self): + return iter(self.certificates) + + def __len__(self): + return len(self.certificates) + + def __getitem__(self, index): + return self.certificates[index] + + +class Certificate(WindowsAzureData): + + def __init__(self): + self.certificate_url = u'' + self.thumbprint = u'' + self.thumbprint_algorithm = u'' + self.data = u'' + + +class OperationError(WindowsAzureData): + + def __init__(self): + self.code = u'' + self.message = u'' + + +class Operation(WindowsAzureData): + + def __init__(self): + self.id = u'' + self.status = u'' + self.http_status_code = u'' + self.error = OperationError() + + +class OperatingSystem(WindowsAzureData): + + def __init__(self): + self.version = u'' + self.label = _Base64String() + self.is_default = True + self.is_active = True + self.family = 0 + self.family_label = _Base64String() + + +class OperatingSystems(WindowsAzureData): + + def __init__(self): + self.operating_systems = _list_of(OperatingSystem) + + def __iter__(self): + return iter(self.operating_systems) + + def __len__(self): + return len(self.operating_systems) + + def __getitem__(self, index): + return self.operating_systems[index] + + +class OperatingSystemFamily(WindowsAzureData): + + def __init__(self): + self.name = u'' + self.label = _Base64String() + self.operating_systems = OperatingSystems() + + +class OperatingSystemFamilies(WindowsAzureData): + + def __init__(self): + self.operating_system_families = _list_of(OperatingSystemFamily) + + def __iter__(self): + return iter(self.operating_system_families) + + def __len__(self): + return len(self.operating_system_families) + + def __getitem__(self, index): + return self.operating_system_families[index] + + +class Subscriptions(WindowsAzureData): + + def __init__(self): + self.subscriptions = _list_of(Subscription) + + def __iter__(self): + return iter(self.subscriptions) + + def __len__(self): + return len(self.subscriptions) + + def __getitem__(self, index): + return self.subscriptions[index] + + +class Subscription(WindowsAzureData): + + def __init__(self): + self.subscription_id = u'' + self.subscription_name = u'' + self.subscription_status = u'' + self.account_admin_live_email_id = u'' + self.service_admin_live_email_id = u'' + self.max_core_count = 0 + self.max_storage_accounts = 0 + self.max_hosted_services = 0 + self.current_core_count = 0 + self.current_hosted_services = 0 + self.current_storage_accounts = 0 + self.max_virtual_network_sites = 0 + self.current_virtual_network_sites = 0 + self.max_local_network_sites = 0 + self.max_dns_servers = 0 + self.aad_tenant_id = u'' + self.created_time = u'' + + +class 
AvailabilityResponse(WindowsAzureData): + + def __init__(self): + self.result = False + self.reason = False + + +class SubscriptionCertificates(WindowsAzureData): + + def __init__(self): + self.subscription_certificates = _list_of(SubscriptionCertificate) + + def __iter__(self): + return iter(self.subscription_certificates) + + def __len__(self): + return len(self.subscription_certificates) + + def __getitem__(self, index): + return self.subscription_certificates[index] + + +class SubscriptionCertificate(WindowsAzureData): + + def __init__(self): + self.subscription_certificate_public_key = u'' + self.subscription_certificate_thumbprint = u'' + self.subscription_certificate_data = u'' + self.created = u'' + + +class RoleSizes(WindowsAzureData): + + def __init__(self): + self.role_sizes = _list_of(RoleSize) + + def __iter__(self): + return iter(self.role_sizes) + + def __len__(self): + return len(self.role_sizes) + + def __getitem__(self, index): + return self.role_sizes[index] + + +class RoleSize(WindowsAzureData): + + def __init__(self): + self.name = u'' + self.label = u'' + self.cores = 0 + self.memory_in_mb = 0 + self.supported_by_web_worker_roles = False + self.supported_by_virtual_machines = False + self.max_data_disk_count = 0 + self.web_worker_resource_disk_size_in_mb = 0 + self.virtual_machine_resource_disk_size_in_mb = 0 + + +class Images(WindowsAzureData): + + def __init__(self): + self.images = _list_of(OSImage) + + def __iter__(self): + return iter(self.images) + + def __len__(self): + return len(self.images) + + def __getitem__(self, index): + return self.images[index] + + +class OSImage(WindowsAzureData): + + def __init__(self): + self.affinity_group = u'' + self.category = u'' + self.location = u'' + self.logical_size_in_gb = 0 + self.label = u'' + self.media_link = u'' + self.name = u'' + self.os = u'' + self.eula = u'' + self.description = u'' + self.image_family = u'' + self.show_in_gui = True + self.published_date = u'' + self.is_premium = True + self.icon_uri = u'' + self.privacy_uri = u'' + self.recommended_vm_size = u'' + self.publisher_name = u'' + self.pricing_detail_link = u'' + self.small_icon_uri = u'' + self.os_state = u'' + self.language = u'' + + +class Disks(WindowsAzureData): + + def __init__(self): + self.disks = _list_of(Disk) + + def __iter__(self): + return iter(self.disks) + + def __len__(self): + return len(self.disks) + + def __getitem__(self, index): + return self.disks[index] + + +class Disk(WindowsAzureData): + + def __init__(self): + self.affinity_group = u'' + self.attached_to = AttachedTo() + self.has_operating_system = u'' + self.is_corrupted = u'' + self.location = u'' + self.logical_disk_size_in_gb = 0 + self.label = u'' + self.media_link = u'' + self.name = u'' + self.os = u'' + self.source_image_name = u'' + + +class AttachedTo(WindowsAzureData): + + def __init__(self): + self.hosted_service_name = u'' + self.deployment_name = u'' + self.role_name = u'' + + +class PersistentVMRole(WindowsAzureData): + + def __init__(self): + self.role_name = u'' + self.role_type = u'' + self.os_version = u'' # undocumented + self.configuration_sets = ConfigurationSets() + self.availability_set_name = u'' + self.data_virtual_hard_disks = DataVirtualHardDisks() + self.os_virtual_hard_disk = OSVirtualHardDisk() + self.role_size = u'' + self.default_win_rm_certificate_thumbprint = u'' + + +class ConfigurationSets(WindowsAzureData): + + def __init__(self): + self.configuration_sets = _list_of(ConfigurationSet) + + def __iter__(self): + return 
iter(self.configuration_sets) + + def __len__(self): + return len(self.configuration_sets) + + def __getitem__(self, index): + return self.configuration_sets[index] + + +class PublicIPs(WindowsAzureData): + + def __init__(self): + self.public_ips = _list_of(PublicIP) + + def __iter__(self): + return iter(self.public_ips) + + def __len__(self): + return len(self.public_ips) + + def __getitem__(self, index): + return self.public_ips[index] + + +class PublicIP(WindowsAzureData): + + def __init__(self, name=u''): + self.name = name + self.idle_timeout_in_minutes = 4 + self.address = None + +class ConfigurationSet(WindowsAzureData): + + def __init__(self): + self.configuration_set_type = u'NetworkConfiguration' + self.role_type = u'' + self.input_endpoints = ConfigurationSetInputEndpoints() + self.subnet_names = _scalar_list_of(str, 'SubnetName') + self.public_ips = PublicIPs() + + +class ConfigurationSetInputEndpoints(WindowsAzureData): + + def __init__(self): + self.input_endpoints = _list_of( + ConfigurationSetInputEndpoint, 'InputEndpoint') + + def __iter__(self): + return iter(self.input_endpoints) + + def __len__(self): + return len(self.input_endpoints) + + def __getitem__(self, index): + return self.input_endpoints[index] + + +class ConfigurationSetInputEndpoint(WindowsAzureData): + + ''' + Initializes a network configuration input endpoint. + + name: Specifies the name for the external endpoint. + protocol: + Specifies the protocol to use to inspect the virtual machine + availability status. Possible values are: HTTP, TCP. + port: Specifies the external port to use for the endpoint. + local_port: + Specifies the internal port on which the virtual machine is listening + to serve the endpoint. + load_balanced_endpoint_set_name: + Specifies a name for a set of load-balanced endpoints. Specifying this + element for a given endpoint adds it to the set. If you are setting an + endpoint to use to connect to the virtual machine via the Remote + Desktop, do not set this property. + enable_direct_server_return: + Specifies whether direct server return load balancing is enabled. 
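    A minimal, illustrative construction (the values are hypothetical;
    the attribute wiring follows the ConfigurationSet and
    ConfigurationSetInputEndpoints classes defined above):

        endpoint = ConfigurationSetInputEndpoint(
            name=u'web', protocol=u'tcp', port=u'80', local_port=u'8080')
        network_config = ConfigurationSet()
        network_config.input_endpoints.input_endpoints.append(endpoint)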
+ ''' + + def __init__(self, name=u'', protocol=u'', port=u'', local_port=u'', + load_balanced_endpoint_set_name=u'', + enable_direct_server_return=False): + self.enable_direct_server_return = enable_direct_server_return + self.load_balanced_endpoint_set_name = load_balanced_endpoint_set_name + self.local_port = local_port + self.name = name + self.port = port + self.load_balancer_probe = LoadBalancerProbe() + self.protocol = protocol + + +class WindowsConfigurationSet(WindowsAzureData): + + def __init__(self, computer_name=None, admin_password=None, + reset_password_on_first_logon=None, + enable_automatic_updates=None, time_zone=None, + admin_username=None, custom_data=None): + self.configuration_set_type = u'WindowsProvisioningConfiguration' + self.computer_name = computer_name + self.admin_password = admin_password + self.admin_username = admin_username + self.reset_password_on_first_logon = reset_password_on_first_logon + self.enable_automatic_updates = enable_automatic_updates + self.time_zone = time_zone + self.domain_join = DomainJoin() + self.stored_certificate_settings = StoredCertificateSettings() + self.win_rm = WinRM() + self.custom_data = custom_data + self.additional_unattend_content = AdditionalUnattendContent() + + +class DomainJoin(WindowsAzureData): + + def __init__(self): + self.credentials = Credentials() + self.join_domain = u'' + self.machine_object_ou = u'' + + +class Credentials(WindowsAzureData): + + def __init__(self): + self.domain = u'' + self.username = u'' + self.password = u'' + + +class StoredCertificateSettings(WindowsAzureData): + + def __init__(self): + self.stored_certificate_settings = _list_of(CertificateSetting) + + def __iter__(self): + return iter(self.stored_certificate_settings) + + def __len__(self): + return len(self.stored_certificate_settings) + + def __getitem__(self, index): + return self.stored_certificate_settings[index] + + +class CertificateSetting(WindowsAzureData): + + ''' + Initializes a certificate setting. + + thumbprint: + Specifies the thumbprint of the certificate to be provisioned. The + thumbprint must specify an existing service certificate. + store_name: + Specifies the name of the certificate store from which retrieve + certificate. + store_location: + Specifies the target certificate store location on the virtual machine. + The only supported value is LocalMachine. + ''' + + def __init__(self, thumbprint=u'', store_name=u'', store_location=u''): + self.thumbprint = thumbprint + self.store_name = store_name + self.store_location = store_location + + +class WinRM(WindowsAzureData): + + ''' + Contains configuration settings for the Windows Remote Management service on + the Virtual Machine. + ''' + + def __init__(self): + self.listeners = Listeners() + + +class Listeners(WindowsAzureData): + + def __init__(self): + self.listeners = _list_of(Listener) + + def __iter__(self): + return iter(self.listeners) + + def __len__(self): + return len(self.listeners) + + def __getitem__(self, index): + return self.listeners[index] + + +class Listener(WindowsAzureData): + + ''' + Specifies the protocol and certificate information for the listener. + + protocol: + Specifies the protocol of listener. Possible values are: Http, Https. + The value is case sensitive. + certificate_thumbprint: + Optional. Specifies the certificate thumbprint for the secure + connection. If this value is not specified, a self-signed certificate is + generated and used for the Virtual Machine. 
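    An illustrative wiring of a listener into a Windows provisioning
    configuration (the thumbprint is a placeholder; the nested attribute
    chain follows the WindowsConfigurationSet, WinRM, and Listeners classes
    defined above):

        win_cfg = WindowsConfigurationSet(computer_name='myvm',
                                          admin_password='Secret!pass1',
                                          admin_username='azureuser')
        win_cfg.win_rm.listeners.listeners.append(
            Listener(protocol='Https',
                     certificate_thumbprint='00000000000000000000'))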
+ ''' + + def __init__(self, protocol=u'', certificate_thumbprint=u''): + self.protocol = protocol + self.certificate_thumbprint = certificate_thumbprint + + +class LinuxConfigurationSet(WindowsAzureData): + + def __init__(self, host_name=None, user_name=None, user_password=None, + disable_ssh_password_authentication=None, custom_data=None): + self.configuration_set_type = u'LinuxProvisioningConfiguration' + self.host_name = host_name + self.user_name = user_name + self.user_password = user_password + self.disable_ssh_password_authentication =\ + disable_ssh_password_authentication + self.ssh = SSH() + self.custom_data = custom_data + + +class SSH(WindowsAzureData): + + def __init__(self): + self.public_keys = PublicKeys() + self.key_pairs = KeyPairs() + + +class PublicKeys(WindowsAzureData): + + def __init__(self): + self.public_keys = _list_of(PublicKey) + + def __iter__(self): + return iter(self.public_keys) + + def __len__(self): + return len(self.public_keys) + + def __getitem__(self, index): + return self.public_keys[index] + + +class PublicKey(WindowsAzureData): + + def __init__(self, fingerprint=u'', path=u''): + self.fingerprint = fingerprint + self.path = path + + +class KeyPairs(WindowsAzureData): + + def __init__(self): + self.key_pairs = _list_of(KeyPair) + + def __iter__(self): + return iter(self.key_pairs) + + def __len__(self): + return len(self.key_pairs) + + def __getitem__(self, index): + return self.key_pairs[index] + + +class KeyPair(WindowsAzureData): + + def __init__(self, fingerprint=u'', path=u''): + self.fingerprint = fingerprint + self.path = path + + +class LoadBalancerProbe(WindowsAzureData): + + def __init__(self): + self.path = u'' + self.port = u'' + self.protocol = u'' + + +class DataVirtualHardDisks(WindowsAzureData): + + def __init__(self): + self.data_virtual_hard_disks = _list_of(DataVirtualHardDisk) + + def __iter__(self): + return iter(self.data_virtual_hard_disks) + + def __len__(self): + return len(self.data_virtual_hard_disks) + + def __getitem__(self, index): + return self.data_virtual_hard_disks[index] + + +class DataVirtualHardDisk(WindowsAzureData): + + def __init__(self): + self.host_caching = None + self.disk_label = u'' + self.disk_name = u'' + self.lun = 0 + self.logical_disk_size_in_gb = 0 + self.media_link = u'' + + +class OSVirtualHardDisk(WindowsAzureData): + + def __init__(self, source_image_name=None, media_link=None, + host_caching=None, disk_label=None, disk_name=None, + os=None, remote_source_image_link=None): + self.source_image_name = source_image_name + self.media_link = media_link + self.host_caching = host_caching + self.disk_label = disk_label + self.disk_name = disk_name + self.os = os + self.remote_source_image_link = remote_source_image_link + +class AsynchronousOperationResult(WindowsAzureData): + + def __init__(self, request_id=None): + self.request_id = request_id + + +class ServiceBusRegion(WindowsAzureData): + + def __init__(self): + self.code = u'' + self.fullname = u'' + + +class ServiceBusNamespace(WindowsAzureData): + + def __init__(self): + self.name = u'' + self.region = u'' + self.default_key = u'' + self.status = u'' + self.created_at = u'' + self.acs_management_endpoint = u'' + self.servicebus_endpoint = u'' + self.connection_string = u'' + self.subscription_id = u'' + self.enabled = False + + +class MetricProperties(WindowsAzureData): + + def __init__(self): + self.name = u'' + self.primary_aggregation = u'' + self.unit = u'' + self.display_name = u'' + + +class MetricValues(WindowsAzureData): + + def 
__init__(self): + self.timestamp = u'' + self.min = 0 + self.max = 0 + self.average = 0 + self.total = 0 + + +class MetricRollups(WindowsAzureData): + + def __init__(self): + self.time_grain = u'' + self.retention = u'' + + +class WebSpaces(WindowsAzureData): + + def __init__(self): + self.web_space = _list_of(WebSpace) + + def __iter__(self): + return iter(self.web_space) + + def __len__(self): + return len(self.web_space) + + def __getitem__(self, index): + return self.web_space[index] + + +class WebSpace(WindowsAzureData): + + def __init__(self): + self.availability_state = u'' + self.geo_location = u'' + self.geo_region = u'' + self.name = u'' + self.plan = u'' + self.status = u'' + self.subscription = u'' + + +class Sites(WindowsAzureData): + + def __init__(self): + self.site = _list_of(Site) + + def __iter__(self): + return iter(self.site) + + def __len__(self): + return len(self.site) + + def __getitem__(self, index): + return self.site[index] + + +class Site(WindowsAzureData): + + def __init__(self): + self.admin_enabled = False + self.availability_state = '' + self.compute_mode = '' + self.enabled = False + self.enabled_host_names = _scalar_list_of(str, 'a:string') + self.host_name_ssl_states = HostNameSslStates() + self.host_names = _scalar_list_of(str, 'a:string') + self.last_modified_time_utc = '' + self.name = '' + self.repository_site_name = '' + self.self_link = '' + self.server_farm = '' + self.site_mode = '' + self.state = '' + self.storage_recovery_default_state = '' + self.usage_state = '' + self.web_space = '' + + +class HostNameSslStates(WindowsAzureData): + + def __init__(self): + self.host_name_ssl_state = _list_of(HostNameSslState) + + def __iter__(self): + return iter(self.host_name_ssl_state) + + def __len__(self): + return len(self.host_name_ssl_state) + + def __getitem__(self, index): + return self.host_name_ssl_state[index] + + +class HostNameSslState(WindowsAzureData): + + def __init__(self): + self.name = u'' + self.ssl_state = u'' + + +class PublishData(WindowsAzureData): + _xml_name = 'publishData' + + def __init__(self): + self.publish_profiles = _list_of(PublishProfile, 'publishProfile') + +class PublishProfile(WindowsAzureData): + + def __init__(self): + self.profile_name = _xml_attribute('profileName') + self.publish_method = _xml_attribute('publishMethod') + self.publish_url = _xml_attribute('publishUrl') + self.msdeploysite = _xml_attribute('msdeploySite') + self.user_name = _xml_attribute('userName') + self.user_pwd = _xml_attribute('userPWD') + self.destination_app_url = _xml_attribute('destinationAppUrl') + self.sql_server_db_connection_string = _xml_attribute('SQLServerDBConnectionString') + self.my_sqldb_connection_string = _xml_attribute('mySQLDBConnectionString') + self.hosting_provider_forum_link = _xml_attribute('hostingProviderForumLink') + self.control_panel_link = _xml_attribute('controlPanelLink') + +class QueueDescription(WindowsAzureData): + + def __init__(self): + self.lock_duration = u'' + self.max_size_in_megabytes = 0 + self.requires_duplicate_detection = False + self.requires_session = False + self.default_message_time_to_live = u'' + self.dead_lettering_on_message_expiration = False + self.duplicate_detection_history_time_window = u'' + self.max_delivery_count = 0 + self.enable_batched_operations = False + self.size_in_bytes = 0 + self.message_count = 0 + self.is_anonymous_accessible = False + self.authorization_rules = AuthorizationRules() + self.status = u'' + self.created_at = u'' + self.updated_at = u'' + self.accessed_at = 
u'' + self.support_ordering = False + self.auto_delete_on_idle = u'' + self.count_details = CountDetails() + self.entity_availability_status = u'' + +class TopicDescription(WindowsAzureData): + + def __init__(self): + self.default_message_time_to_live = u'' + self.max_size_in_megabytes = 0 + self.requires_duplicate_detection = False + self.duplicate_detection_history_time_window = u'' + self.enable_batched_operations = False + self.size_in_bytes = 0 + self.filtering_messages_before_publishing = False + self.is_anonymous_accessible = False + self.authorization_rules = AuthorizationRules() + self.status = u'' + self.created_at = u'' + self.updated_at = u'' + self.accessed_at = u'' + self.support_ordering = False + self.count_details = CountDetails() + self.subscription_count = 0 + +class CountDetails(WindowsAzureData): + + def __init__(self): + self.active_message_count = 0 + self.dead_letter_message_count = 0 + self.scheduled_message_count = 0 + self.transfer_message_count = 0 + self.transfer_dead_letter_message_count = 0 + +class NotificationHubDescription(WindowsAzureData): + + def __init__(self): + self.registration_ttl = u'' + self.authorization_rules = AuthorizationRules() + +class AuthorizationRules(WindowsAzureData): + + def __init__(self): + self.authorization_rule = _list_of(AuthorizationRule) + + def __iter__(self): + return iter(self.authorization_rule) + + def __len__(self): + return len(self.authorization_rule) + + def __getitem__(self, index): + return self.authorization_rule[index] + +class AuthorizationRule(WindowsAzureData): + + def __init__(self): + self.claim_type = u'' + self.claim_value = u'' + self.rights = _scalar_list_of(str, 'AccessRights') + self.created_time = u'' + self.modified_time = u'' + self.key_name = u'' + self.primary_key = u'' + self.secondary_keu = u'' + +class RelayDescription(WindowsAzureData): + + def __init__(self): + self.path = u'' + self.listener_type = u'' + self.listener_count = 0 + self.created_at = u'' + self.updated_at = u'' + + +class MetricResponses(WindowsAzureData): + + def __init__(self): + self.metric_response = _list_of(MetricResponse) + + def __iter__(self): + return iter(self.metric_response) + + def __len__(self): + return len(self.metric_response) + + def __getitem__(self, index): + return self.metric_response[index] + + +class MetricResponse(WindowsAzureData): + + def __init__(self): + self.code = u'' + self.data = Data() + self.message = u'' + + +class Data(WindowsAzureData): + + def __init__(self): + self.display_name = u'' + self.end_time = u'' + self.name = u'' + self.primary_aggregation_type = u'' + self.start_time = u'' + self.time_grain = u'' + self.unit = u'' + self.values = Values() + + +class Values(WindowsAzureData): + + def __init__(self): + self.metric_sample = _list_of(MetricSample) + + def __iter__(self): + return iter(self.metric_sample) + + def __len__(self): + return len(self.metric_sample) + + def __getitem__(self, index): + return self.metric_sample[index] + + +class MetricSample(WindowsAzureData): + + def __init__(self): + self.count = 0 + self.time_created = u'' + self.total = 0 + + +class MetricDefinitions(WindowsAzureData): + + def __init__(self): + self.metric_definition = _list_of(MetricDefinition) + + def __iter__(self): + return iter(self.metric_definition) + + def __len__(self): + return len(self.metric_definition) + + def __getitem__(self, index): + return self.metric_definition[index] + + +class MetricDefinition(WindowsAzureData): + + def __init__(self): + self.display_name = u'' + 
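# For orientation: a metrics payload deserializes into the nesting defined
# above, MetricResponses -> MetricResponse.data (Data) -> Data.values
# (Values) -> MetricSample. A hypothetical walk over one response set:
#
#     for resp in metric_responses:          # MetricResponse
#         data = resp.data
#         for sample in data.values:         # MetricSample
#             print(data.name, data.unit,
#                   sample.time_created, sample.total)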
self.metric_availabilities = MetricAvailabilities() + self.name = u'' + self.primary_aggregation_type = u'' + self.unit = u'' + + +class MetricAvailabilities(WindowsAzureData): + + def __init__(self): + self.metric_availability = _list_of(MetricAvailability, 'MetricAvailabilily') + + def __iter__(self): + return iter(self.metric_availability) + + def __len__(self): + return len(self.metric_availability) + + def __getitem__(self, index): + return self.metric_availability[index] + + +class MetricAvailability(WindowsAzureData): + + def __init__(self): + self.retention = u'' + self.time_grain = u'' + + +class Servers(WindowsAzureData): + + def __init__(self): + self.server = _list_of(Server) + + def __iter__(self): + return iter(self.server) + + def __len__(self): + return len(self.server) + + def __getitem__(self, index): + return self.server[index] + + +class Server(WindowsAzureData): + + def __init__(self): + self.name = u'' + self.administrator_login = u'' + self.location = u'' + self.geo_paired_region = u'' + self.fully_qualified_domain_name = u'' + self.version = u'' + + +class ServerQuota(WindowsAzureData): + + def __init__(self): + self.name = u'' + self.type = u'' + self.state = u'' + self.self_link = u'' + self.parent_link = u'' + self.value = 0 + + +class EventLog(WindowsAzureData): + + def __init__(self): + self.name = u'' + self.type = u'' + self.state = u'' + self.self_link = u'' + self.parent_link = u'' + self.database_name = u'' + self.name = u'' + self.start_time_utc = u'' + self.interval_size_in_minutes = 0 + self.event_category = u'' + self.event_type = u'' + self.event_subtype = 0 + self.event_subtype_description = u'' + self.number_of_events = 0 + self.severity = 0 + self.description = u'' + self.additional_data = u'' + + +class CreateServerResponse(WindowsAzureData): + + def __init__(self): + self.server_name = u'' + + +class Database(WindowsAzureData): + + def __init__(self): + self.name = u'' + self.type = u'' + self.state = u'' + self.self_link = u'' + self.parent_link = u'' + self.id = 0 + self.edition = u'' + self.collation_name = u'' + self.creation_date = u'' + self.is_federation_root = False + self.is_system_object = False + self.max_size_bytes = 0 + + +class FirewallRule(WindowsAzureData): + + def __init__(self): + self.name = u'' + self.type = u'' + self.state = u'' + self.self_link = u'' + self.parent_link = u'' + self.start_ip_address = u'' + self.end_ip_address = u'' + + +class ServiceObjective(WindowsAzureData): + + def __init__(self): + self.name = u'' + self.type = u'' + self.state = u'' + self.self_link = u'' + self.parent_link = u'' + self.id = u'' + self.is_default = False + self.is_system = False + self.description = u'' + self.enabled = False + + +class CloudServices(WindowsAzureData): + + def __init__(self): + self.cloud_service = _list_of(CloudService) + + def __iter__(self): + return iter(self.cloud_service) + + def __len__(self): + return len(self.cloud_service) + + def __getitem__(self, index): + return self.cloud_service[index] + + +class CloudService(WindowsAzureData): + + def __init__(self): + self.name = u'' + self.label = u'' + self.description = u'' + self.geo_region = u'' + self.resources = Resources() + + +class Resources(WindowsAzureData): + + def __init__(self): + self.resource = _list_of(Resource) + + def __iter__(self): + return iter(self.resource) + + def __len__(self): + return len(self.resource) + + def __getitem__(self, index): + return self.resource[index] + + +class Resource(WindowsAzureData): + + def __init__(self): + 
self.resource_provider_namespace = u'' + self.type = u'' + self.name = u'' + self.schema_version = u'' + self.e_tag = u'' + self.state = u'' + self.intrinsic_settings = IntrinsicSettings() + self.operation_status = OperationStatus() + + +class IntrinsicSettings(WindowsAzureData): + + def __init__(self): + self.plan = u'' + self.quota = Quota() + + +class Quota(WindowsAzureData): + + def __init__(self): + self.max_job_count = 0 + self.max_recurrence = MaxRecurrence() + + +class MaxRecurrence(WindowsAzureData): + + def __init__(self): + self.frequency = u'' + self.interval = 0 + + +class OperationStatus(WindowsAzureData): + + def __init__(self): + self.type = u'' + self.result = u'' + + +def parse_response_for_async_op(response): + ''' Extracts request id from response header. ''' + + if response is None: + return None + + result = AsynchronousOperationResult() + if response.headers: + for name, value in response.headers: + if name.lower() == 'x-ms-request-id': + result.request_id = value + + return result + + +def _management_error_handler(http_error): + ''' Simple error handler for management service. ''' + return _general_error_handler(http_error) + + +def _lower(text): + return text.lower() + + +def _data_to_xml(data): + '''Creates an xml fragment from the specified data. + data: Array of tuples, where first: xml element name + second: xml element text + third: conversion function + ''' + xml = '' + for element in data: + name = element[0] + val = element[1] + if len(element) > 2: + converter = element[2] + else: + converter = None + + if val is not None: + if converter is not None: + text = _str(converter(_str(val))) + else: + text = _str(val) + + xml += ''.join(['<', name, '>', text, '']) + return xml + + +class _XmlSerializer(object): + + @staticmethod + def create_storage_service_input_to_xml(service_name, description, label, + affinity_group, location, + account_type, + extended_properties): + xml = _XmlSerializer.data_to_xml( + [('ServiceName', service_name), + ('Description', description), + ('Label', label, _encode_base64), + ('AffinityGroup', affinity_group), + ('Location', location)]) + if extended_properties is not None: + xml += _XmlSerializer.extended_properties_dict_to_xml_fragment( + extended_properties) + xml += _XmlSerializer.data_to_xml([('AccountType', account_type)]) + return _XmlSerializer.doc_from_xml('CreateStorageServiceInput', xml) + + @staticmethod + def update_storage_service_input_to_xml(description, label, + account_type, + extended_properties): + xml = _XmlSerializer.data_to_xml( + [('Description', description), + ('Label', label, _encode_base64)]) + if extended_properties is not None: + xml += _XmlSerializer.extended_properties_dict_to_xml_fragment( + extended_properties) + xml += _XmlSerializer.data_to_xml([('AccountType', account_type)]) + return _XmlSerializer.doc_from_xml('UpdateStorageServiceInput', xml) + + @staticmethod + def regenerate_keys_to_xml(key_type): + return _XmlSerializer.doc_from_data('RegenerateKeys', + [('KeyType', key_type)]) + + @staticmethod + def update_hosted_service_to_xml(label, description, extended_properties): + return _XmlSerializer.doc_from_data('UpdateHostedService', + [('Label', label, _encode_base64), + ('Description', description)], + extended_properties) + + @staticmethod + def create_hosted_service_to_xml(service_name, label, description, + location, affinity_group, + extended_properties): + return _XmlSerializer.doc_from_data( + 'CreateHostedService', + [('ServiceName', service_name), + ('Label', label, 
_encode_base64), + ('Description', description), + ('Location', location), + ('AffinityGroup', affinity_group)], + extended_properties) + + @staticmethod + def create_deployment_to_xml(name, package_url, label, configuration, + start_deployment, treat_warnings_as_error, + extended_properties): + return _XmlSerializer.doc_from_data( + 'CreateDeployment', + [('Name', name), + ('PackageUrl', package_url), + ('Label', label, _encode_base64), + ('Configuration', configuration), + ('StartDeployment', + start_deployment, _lower), + ('TreatWarningsAsError', treat_warnings_as_error, _lower)], + extended_properties) + + @staticmethod + def swap_deployment_to_xml(production, source_deployment): + return _XmlSerializer.doc_from_data( + 'Swap', + [('Production', production), + ('SourceDeployment', source_deployment)]) + + @staticmethod + def update_deployment_status_to_xml(status): + return _XmlSerializer.doc_from_data( + 'UpdateDeploymentStatus', + [('Status', status)]) + + @staticmethod + def change_deployment_to_xml(configuration, treat_warnings_as_error, mode, + extended_properties): + return _XmlSerializer.doc_from_data( + 'ChangeConfiguration', + [('Configuration', configuration), + ('TreatWarningsAsError', treat_warnings_as_error, _lower), + ('Mode', mode)], + extended_properties) + + @staticmethod + def upgrade_deployment_to_xml(mode, package_url, configuration, label, + role_to_upgrade, force, extended_properties): + return _XmlSerializer.doc_from_data( + 'UpgradeDeployment', + [('Mode', mode), + ('PackageUrl', package_url), + ('Configuration', configuration), + ('Label', label, _encode_base64), + ('RoleToUpgrade', role_to_upgrade), + ('Force', force, _lower)], + extended_properties) + + @staticmethod + def rollback_upgrade_to_xml(mode, force): + return _XmlSerializer.doc_from_data( + 'RollbackUpdateOrUpgrade', + [('Mode', mode), + ('Force', force, _lower)]) + + @staticmethod + def walk_upgrade_domain_to_xml(upgrade_domain): + return _XmlSerializer.doc_from_data( + 'WalkUpgradeDomain', + [('UpgradeDomain', upgrade_domain)]) + + @staticmethod + def certificate_file_to_xml(data, certificate_format, password): + return _XmlSerializer.doc_from_data( + 'CertificateFile', + [('Data', data), + ('CertificateFormat', certificate_format), + ('Password', password)]) + + @staticmethod + def create_affinity_group_to_xml(name, label, description, location): + return _XmlSerializer.doc_from_data( + 'CreateAffinityGroup', + [('Name', name), + ('Label', label, _encode_base64), + ('Description', description), + ('Location', location)]) + + @staticmethod + def update_affinity_group_to_xml(label, description): + return _XmlSerializer.doc_from_data( + 'UpdateAffinityGroup', + [('Label', label, _encode_base64), + ('Description', description)]) + + @staticmethod + def subscription_certificate_to_xml(public_key, thumbprint, data): + return _XmlSerializer.doc_from_data( + 'SubscriptionCertificate', + [('SubscriptionCertificatePublicKey', public_key), + ('SubscriptionCertificateThumbprint', thumbprint), + ('SubscriptionCertificateData', data)]) + + @staticmethod + def os_image_to_xml(label, media_link, name, os): + return _XmlSerializer.doc_from_data( + 'OSImage', + [('Label', label), + ('MediaLink', media_link), + ('Name', name), + ('OS', os)]) + + @staticmethod + def data_virtual_hard_disk_to_xml(host_caching, disk_label, disk_name, lun, + logical_disk_size_in_gb, media_link, + source_media_link): + return _XmlSerializer.doc_from_data( + 'DataVirtualHardDisk', + [('HostCaching', host_caching), + ('DiskLabel', 
disk_label), + ('DiskName', disk_name), + ('Lun', lun), + ('LogicalDiskSizeInGB', logical_disk_size_in_gb), + ('MediaLink', media_link), + ('SourceMediaLink', source_media_link)]) + + @staticmethod + def disk_to_xml(has_operating_system, label, media_link, name, os): + return _XmlSerializer.doc_from_data( + 'Disk', + [('HasOperatingSystem', has_operating_system, _lower), + ('Label', label), + ('MediaLink', media_link), + ('Name', name), + ('OS', os)]) + + @staticmethod + def restart_role_operation_to_xml(): + return _XmlSerializer.doc_from_xml( + 'RestartRoleOperation', + 'RestartRoleOperation') + + @staticmethod + def shutdown_role_operation_to_xml(post_shutdown_action): + xml = _XmlSerializer.data_to_xml( + [('OperationType', 'ShutdownRoleOperation'), + ('PostShutdownAction', post_shutdown_action)]) + return _XmlSerializer.doc_from_xml('ShutdownRoleOperation', xml) + + @staticmethod + def shutdown_roles_operation_to_xml(role_names, post_shutdown_action): + xml = _XmlSerializer.data_to_xml( + [('OperationType', 'ShutdownRolesOperation')]) + xml += '' + for role_name in role_names: + xml += _XmlSerializer.data_to_xml([('Name', role_name)]) + xml += '' + xml += _XmlSerializer.data_to_xml( + [('PostShutdownAction', post_shutdown_action)]) + return _XmlSerializer.doc_from_xml('ShutdownRolesOperation', xml) + + @staticmethod + def start_role_operation_to_xml(): + return _XmlSerializer.doc_from_xml( + 'StartRoleOperation', + 'StartRoleOperation') + + @staticmethod + def start_roles_operation_to_xml(role_names): + xml = _XmlSerializer.data_to_xml( + [('OperationType', 'StartRolesOperation')]) + xml += '' + for role_name in role_names: + xml += _XmlSerializer.data_to_xml([('Name', role_name)]) + xml += '' + return _XmlSerializer.doc_from_xml('StartRolesOperation', xml) + + @staticmethod + def windows_configuration_to_xml(configuration): + xml = _XmlSerializer.data_to_xml( + [('ConfigurationSetType', configuration.configuration_set_type), + ('ComputerName', configuration.computer_name), + ('AdminPassword', configuration.admin_password), + ('ResetPasswordOnFirstLogon', + configuration.reset_password_on_first_logon, + _lower), + ('EnableAutomaticUpdates', + configuration.enable_automatic_updates, + _lower), + ('TimeZone', configuration.time_zone)]) + + if configuration.domain_join is not None: + xml += '' + xml += '' + xml += _XmlSerializer.data_to_xml( + [('Domain', configuration.domain_join.credentials.domain), + ('Username', configuration.domain_join.credentials.username), + ('Password', configuration.domain_join.credentials.password)]) + xml += '' + xml += _XmlSerializer.data_to_xml( + [('JoinDomain', configuration.domain_join.join_domain), + ('MachineObjectOU', + configuration.domain_join.machine_object_ou)]) + xml += '' + if configuration.stored_certificate_settings is not None: + xml += '' + for cert in configuration.stored_certificate_settings: + xml += '' + xml += _XmlSerializer.data_to_xml( + [('StoreLocation', cert.store_location), + ('StoreName', cert.store_name), + ('Thumbprint', cert.thumbprint)]) + xml += '' + xml += '' + if configuration.win_rm is not None: + xml += '' + for listener in configuration.win_rm.listeners: + xml += '' + xml += _XmlSerializer.data_to_xml( + [('Protocol', listener.protocol), + ('CertificateThumbprint', listener.certificate_thumbprint)]) + xml += '' + xml += '' + xml += _XmlSerializer.data_to_xml( + [('AdminUsername', configuration.admin_username), + ('CustomData', configuration.custom_data, _encode_base64)]) + if configuration.additional_unattend_content 
and configuration.additional_unattend_content.passes: + xml += '' + for unattend_pass in configuration.additional_unattend_content.passes: + xml += _XmlSerializer.data_to_xml( + [('PassName', unattend_pass.pass_name)]) + if unattend_pass.components: + xml += '' + for comp in unattend_pass.components: + xml += '' + xml += _XmlSerializer.data_to_xml( + [('ComponentName', comp.component_name)]) + if comp.component_settings: + xml += '' + for setting in comp.component_settings: + xml += '' + xml += _XmlSerializer.data_to_xml( + [('SettingName', setting.setting_name), + ('Content', setting.content)]) + xml += '' + xml += '' + xml += '' + xml += '' + xml += '' + + return xml + + @staticmethod + def linux_configuration_to_xml(configuration): + xml = _XmlSerializer.data_to_xml( + [('ConfigurationSetType', configuration.configuration_set_type), + ('HostName', configuration.host_name), + ('UserName', configuration.user_name), + ('UserPassword', configuration.user_password), + ('DisableSshPasswordAuthentication', + configuration.disable_ssh_password_authentication, + _lower)]) + + if configuration.ssh is not None: + xml += '' + xml += '' + for key in configuration.ssh.public_keys: + xml += '' + xml += _XmlSerializer.data_to_xml( + [('Fingerprint', key.fingerprint), + ('Path', key.path)]) + xml += '' + xml += '' + xml += '' + for key in configuration.ssh.key_pairs: + xml += '' + xml += _XmlSerializer.data_to_xml( + [('Fingerprint', key.fingerprint), + ('Path', key.path)]) + xml += '' + xml += '' + xml += '' + + xml += _XmlSerializer.data_to_xml( + [('CustomData', configuration.custom_data, _encode_base64)]) + + return xml + + @staticmethod + def network_configuration_to_xml(configuration): + xml = _XmlSerializer.data_to_xml( + [('ConfigurationSetType', configuration.configuration_set_type)]) + xml += '' + for endpoint in configuration.input_endpoints: + xml += '' + xml += _XmlSerializer.data_to_xml( + [('LoadBalancedEndpointSetName', + endpoint.load_balanced_endpoint_set_name), + ('LocalPort', endpoint.local_port), + ('Name', endpoint.name), + ('Port', endpoint.port)]) + + if endpoint.load_balancer_probe.path or\ + endpoint.load_balancer_probe.port or\ + endpoint.load_balancer_probe.protocol: + xml += '' + xml += _XmlSerializer.data_to_xml( + [('Path', endpoint.load_balancer_probe.path), + ('Port', endpoint.load_balancer_probe.port), + ('Protocol', endpoint.load_balancer_probe.protocol)]) + xml += '' + + xml += _XmlSerializer.data_to_xml( + [('Protocol', endpoint.protocol), + ('EnableDirectServerReturn', + endpoint.enable_direct_server_return, + _lower)]) + + xml += '' + xml += '' + xml += '' + for name in configuration.subnet_names: + xml += _XmlSerializer.data_to_xml([('SubnetName', name)]) + xml += '' + + if configuration.public_ips: + xml += '' + for public_ip in configuration.public_ips: + xml += '' + xml += _XmlSerializer.data_to_xml( + [('Name', public_ip.name), + ('IdleTimeoutInMinutes', public_ip.idle_timeout_in_minutes)]) + xml += '' + xml += '' + + return xml + + @staticmethod + def role_to_xml(availability_set_name, data_virtual_hard_disks, + network_configuration_set, os_virtual_hard_disk, role_name, + role_size, role_type, system_configuration_set, + resource_extension_references, + provision_guest_agent, vm_image_name, media_location): + xml = _XmlSerializer.data_to_xml([('RoleName', role_name), + ('RoleType', role_type)]) + + if system_configuration_set or network_configuration_set: + xml += '' + + if system_configuration_set is not None: + xml += '' + if 
isinstance(system_configuration_set, WindowsConfigurationSet): + xml += _XmlSerializer.windows_configuration_to_xml( + system_configuration_set) + elif isinstance(system_configuration_set, LinuxConfigurationSet): + xml += _XmlSerializer.linux_configuration_to_xml( + system_configuration_set) + xml += '' + + if network_configuration_set is not None: + xml += '' + xml += _XmlSerializer.network_configuration_to_xml( + network_configuration_set) + xml += '' + + xml += '' + + if resource_extension_references: + xml += '' + for ext in resource_extension_references: + xml += '' + xml += _XmlSerializer.data_to_xml( + [('ReferenceName', ext.reference_name), + ('Publisher', ext.publisher), + ('Name', ext.name), + ('Version', ext.version)]) + if ext.resource_extension_parameter_values: + xml += '' + for val in ext.resource_extension_parameter_values: + xml += '' + xml += _XmlSerializer.data_to_xml( + [('Key', val.key), + ('Value', val.value), + ('Type', val.type)]) + xml += '' + xml += '' + xml += '' + xml += '' + + xml += _XmlSerializer.data_to_xml( + [('VMImageName', vm_image_name), + ('MediaLocation', media_location), + ('AvailabilitySetName', availability_set_name)]) + + if data_virtual_hard_disks is not None: + xml += '' + for hd in data_virtual_hard_disks: + xml += '' + xml += _XmlSerializer.data_to_xml( + [('HostCaching', hd.host_caching), + ('DiskLabel', hd.disk_label), + ('DiskName', hd.disk_name), + ('Lun', hd.lun), + ('LogicalDiskSizeInGB', hd.logical_disk_size_in_gb), + ('MediaLink', hd.media_link)]) + xml += '' + xml += '' + + if os_virtual_hard_disk is not None: + xml += '' + xml += _XmlSerializer.data_to_xml( + [('HostCaching', os_virtual_hard_disk.host_caching), + ('DiskLabel', os_virtual_hard_disk.disk_label), + ('DiskName', os_virtual_hard_disk.disk_name), + ('MediaLink', os_virtual_hard_disk.media_link), + ('SourceImageName', os_virtual_hard_disk.source_image_name), + ('OS', os_virtual_hard_disk.os), + ('RemoteSourceImageLink', os_virtual_hard_disk.remote_source_image_link)]) + xml += '' + + xml += _XmlSerializer.data_to_xml( + [('RoleSize', role_size), + ('ProvisionGuestAgent', provision_guest_agent, _lower)]) + + return xml + + @staticmethod + def add_role_to_xml(role_name, system_configuration_set, + os_virtual_hard_disk, role_type, + network_configuration_set, availability_set_name, + data_virtual_hard_disks, role_size, + resource_extension_references, provision_guest_agent, + vm_image_name, media_location): + xml = _XmlSerializer.role_to_xml( + availability_set_name, + data_virtual_hard_disks, + network_configuration_set, + os_virtual_hard_disk, + role_name, + role_size, + role_type, + system_configuration_set, + resource_extension_references, + provision_guest_agent, + vm_image_name, + media_location) + return _XmlSerializer.doc_from_xml('PersistentVMRole', xml) + + @staticmethod + def update_role_to_xml(role_name, os_virtual_hard_disk, role_type, + network_configuration_set, availability_set_name, + data_virtual_hard_disks, role_size, + resource_extension_references, + provision_guest_agent): + xml = _XmlSerializer.role_to_xml( + availability_set_name, + data_virtual_hard_disks, + network_configuration_set, + os_virtual_hard_disk, + role_name, + role_size, + role_type, + None, + resource_extension_references, + provision_guest_agent, + None, + None) + return _XmlSerializer.doc_from_xml('PersistentVMRole', xml) + + @staticmethod + def capture_role_to_xml(post_capture_action, target_image_name, + target_image_label, provisioning_configuration): + xml = 
_XmlSerializer.data_to_xml( + [('OperationType', 'CaptureRoleOperation'), + ('PostCaptureAction', post_capture_action)]) + + if provisioning_configuration is not None: + xml += '' + if isinstance(provisioning_configuration, WindowsConfigurationSet): + xml += _XmlSerializer.windows_configuration_to_xml( + provisioning_configuration) + elif isinstance(provisioning_configuration, LinuxConfigurationSet): + xml += _XmlSerializer.linux_configuration_to_xml( + provisioning_configuration) + xml += '' + + xml += _XmlSerializer.data_to_xml( + [('TargetImageLabel', target_image_label), + ('TargetImageName', target_image_name)]) + + return _XmlSerializer.doc_from_xml('CaptureRoleOperation', xml) + + @staticmethod + def virtual_machine_deployment_to_xml(deployment_name, deployment_slot, + label, role_name, + system_configuration_set, + os_virtual_hard_disk, role_type, + network_configuration_set, + availability_set_name, + data_virtual_hard_disks, role_size, + virtual_network_name, + resource_extension_references, + provision_guest_agent, + vm_image_name, + media_location, + dns_servers, + reserved_ip_name): + xml = _XmlSerializer.data_to_xml([('Name', deployment_name), + ('DeploymentSlot', deployment_slot), + ('Label', label)]) + xml += '' + xml += '' + xml += _XmlSerializer.role_to_xml( + availability_set_name, + data_virtual_hard_disks, + network_configuration_set, + os_virtual_hard_disk, + role_name, + role_size, + role_type, + system_configuration_set, + resource_extension_references, + provision_guest_agent, + vm_image_name, + media_location) + xml += '' + xml += '' + + xml += _XmlSerializer.data_to_xml( + [('VirtualNetworkName', virtual_network_name)]) + + if dns_servers: + xml += '' + for dns_server in dns_servers: + xml += '' + xml += _XmlSerializer.data_to_xml( + [('Name', dns_server.name), + ('Address', dns_server.address)]) + xml += '' + xml += '' + + xml += _XmlSerializer.data_to_xml( + [('ReservedIPName', reserved_ip_name)]) + + return _XmlSerializer.doc_from_xml('Deployment', xml) + + @staticmethod + def capture_vm_image_to_xml(options): + return _XmlSerializer.doc_from_data( + 'CaptureRoleAsVMImageOperation ', + [('OperationType', 'CaptureRoleAsVMImageOperation'), + ('OSState', options.os_state), + ('VMImageName', options.vm_image_name), + ('VMImageLabel', options.vm_image_label), + ('Description', options.description), + ('Language', options.language), + ('ImageFamily', options.image_family), + ('RecommendedVMSize', options.recommended_vm_size)]) + + @staticmethod + def create_vm_image_to_xml(image): + xml = _XmlSerializer.data_to_xml( + [('Name', image.name), + ('Label', image.label), + ('Description', image.description)]) + + os_disk = image.os_disk_configuration + xml += '' + xml += _XmlSerializer.data_to_xml( + [('HostCaching', os_disk.host_caching), + ('OSState', os_disk.os_state), + ('OS', os_disk.os), + ('MediaLink', os_disk.media_link)]) + xml += '' + + if image.data_disk_configurations: + xml += '' + for data_disk in image.data_disk_configurations: + xml += '' + xml += _XmlSerializer.data_to_xml( + [('HostCaching', data_disk.host_caching), + ('Lun', data_disk.lun), + ('MediaLink', data_disk.media_link), + ('LogicalDiskSizeInGB', data_disk.logical_disk_size_in_gb)]) + xml += '' + xml += '' + + xml += _XmlSerializer.data_to_xml( + [('Language', image.language), + ('ImageFamily', image.image_family), + ('RecommendedVMSize', image.recommended_vm_size), + ('Eula', image.eula), + ('IconUri', image.icon_uri), + ('SmallIconUri', image.small_icon_uri), + ('PrivacyUri', 
image.privacy_uri), + ('PublishedDate', image.published_date), + ('ShowInGui', image.show_in_gui, _lower)]) + + return _XmlSerializer.doc_from_xml('VMImage', xml) + + @staticmethod + def update_vm_image_to_xml(image): + xml = _XmlSerializer.data_to_xml( + [('Label', image.label), + ('Description', image.description)]) + + os_disk = image.os_disk_configuration + xml += '' + xml += _XmlSerializer.data_to_xml( + [('HostCaching', os_disk.host_caching)]) + xml += '' + + xml += '' + for data_disk in image.data_disk_configurations: + xml += '' + xml += _XmlSerializer.data_to_xml( + [('Name', data_disk.name), + ('HostCaching', data_disk.host_caching), + ('Lun', data_disk.lun)]) + xml += '' + xml += '' + + xml += _XmlSerializer.data_to_xml( + [('Language', image.language), + ('ImageFamily', image.image_family), + ('RecommendedVMSize', image.recommended_vm_size), + ('Eula', image.eula), + ('IconUri', image.icon_uri), + ('SmallIconUri', image.small_icon_uri), + ('PrivacyUri', image.privacy_uri), + ('PublishedDate', image.published_date), + ('ShowInGui', image.show_in_gui, _lower)]) + + return _XmlSerializer.doc_from_xml('VMImage', xml) + + @staticmethod + def create_website_to_xml(webspace_name, website_name, geo_region, plan, + host_names, compute_mode, server_farm, site_mode): + xml = '' + for host_name in host_names: + xml += '{0}'.format(host_name) + xml += '' + xml += _XmlSerializer.data_to_xml( + [('Name', website_name), + ('ComputeMode', compute_mode), + ('ServerFarm', server_farm), + ('SiteMode', site_mode)]) + xml += '' + xml += _XmlSerializer.data_to_xml( + [('GeoRegion', geo_region), + ('Name', webspace_name), + ('Plan', plan)]) + xml += '' + return _XmlSerializer.doc_from_xml('Site', xml) + + @staticmethod + def create_reserved_ip_to_xml(name, label, location): + return _XmlSerializer.doc_from_data( + 'ReservedIP', + [('Name', name), + ('Label', label), + ('Location', location)]) + + @staticmethod + def dns_server_to_xml(name, address): + return _XmlSerializer.doc_from_data( + 'DnsServer', + [('Name', name), + ('Address', address)]) + + @staticmethod + def role_instances_to_xml(role_instances): + xml = '' + for name in role_instances: + xml += _XmlSerializer.data_to_xml([('Name', name)]) + return _XmlSerializer.doc_from_xml('RoleInstances ', xml) + + @staticmethod + def data_to_xml(data): + return _data_to_xml(data) + + @staticmethod + def doc_from_xml(document_element_name, inner_xml): + '''Wraps the specified xml in an xml root element with default azure + namespaces''' + xml = ''.join(['<', document_element_name, + ' xmlns:i="http://www.w3.org/2001/XMLSchema-instance"', + ' xmlns="http://schemas.microsoft.com/windowsazure">']) + xml += inner_xml + xml += ''.join(['']) + return xml + + @staticmethod + def doc_from_data(document_element_name, data, extended_properties=None): + xml = _XmlSerializer.data_to_xml(data) + if extended_properties is not None: + xml += _XmlSerializer.extended_properties_dict_to_xml_fragment( + extended_properties) + return _XmlSerializer.doc_from_xml(document_element_name, xml) + + @staticmethod + def extended_properties_dict_to_xml_fragment(extended_properties): + xml = '' + if extended_properties is not None and len(extended_properties) > 0: + xml += '' + for key, val in extended_properties.items(): + xml += ''.join(['', + '', + _str(key), + '', + '', + _str(val), + '', + '']) + xml += '' + return xml + + +class _SqlManagementXmlSerializer(object): + + @staticmethod + def create_server_to_xml(admin_login, admin_password, location): + return 
_SqlManagementXmlSerializer.doc_from_data( + 'Server', + [('AdministratorLogin', admin_login), + ('AdministratorLoginPassword', admin_password), + ('Location', location)], + 'http://schemas.microsoft.com/sqlazure/2010/12/') + + @staticmethod + def set_server_admin_password_to_xml(admin_password): + return _SqlManagementXmlSerializer.doc_from_xml( + 'AdministratorLoginPassword', admin_password, + 'http://schemas.microsoft.com/sqlazure/2010/12/') + + @staticmethod + def create_firewall_rule_to_xml(name, start_ip_address, end_ip_address): + return _SqlManagementXmlSerializer.doc_from_data( + 'ServiceResource', + [('Name', name), + ('StartIPAddress', start_ip_address), + ('EndIPAddress', end_ip_address)]) + + @staticmethod + def update_firewall_rule_to_xml(name, start_ip_address, end_ip_address): + return _SqlManagementXmlSerializer.doc_from_data( + 'ServiceResource', + [('Name', name), + ('StartIPAddress', start_ip_address), + ('EndIPAddress', end_ip_address)]) + + @staticmethod + def create_database_to_xml(name, service_objective_id, edition, collation_name, + max_size_bytes): + return _SqlManagementXmlSerializer.doc_from_data( + 'ServiceResource', + [('Name', name), + ('Edition', edition), + ('CollationName', collation_name), + ('MaxSizeBytes', max_size_bytes), + ('ServiceObjectiveId', service_objective_id)]) + + @staticmethod + def update_database_to_xml(name, service_objective_id, edition, + max_size_bytes): + return _SqlManagementXmlSerializer.doc_from_data( + 'ServiceResource', + [('Name', name), + ('Edition', edition), + ('MaxSizeBytes', max_size_bytes), + ('ServiceObjectiveId', service_objective_id)]) + + @staticmethod + def xml_to_create_server_response(xmlstr): + xmldoc = minidom.parseString(xmlstr) + element = xmldoc.documentElement + + response = CreateServerResponse() + response.server_name = element.firstChild.nodeValue + response.fully_qualified_domain_name = element.getAttribute('FullyQualifiedDomainName') + + return response + + @staticmethod + def data_to_xml(data): + return _data_to_xml(data) + + @staticmethod + def doc_from_xml(document_element_name, inner_xml, + xmlns='http://schemas.microsoft.com/windowsazure'): + '''Wraps the specified xml in an xml root element with default azure + namespaces''' + xml = ''.join(['<', document_element_name, + ' xmlns="{0}">'.format(xmlns)]) + xml += inner_xml + xml += ''.join(['']) + return xml + + @staticmethod + def doc_from_data(document_element_name, data, + xmlns='http://schemas.microsoft.com/windowsazure'): + xml = _SqlManagementXmlSerializer.data_to_xml(data) + return _SqlManagementXmlSerializer.doc_from_xml( + document_element_name, xml, xmlns) + + +def _parse_bool(value): + if value.lower() == 'true': + return True + return False + + +class _ServiceBusManagementXmlSerializer(object): + + @staticmethod + def namespace_to_xml(region): + '''Converts a service bus namespace description to xml + + The xml format: + + + + + West US + + + + ''' + body = '' + body += ''.join(['', region, '']) + body += '' + + return _create_entry(body) + + @staticmethod + def xml_to_namespace(xmlstr): + '''Converts xml response to service bus namespace + + The xml format for namespace: + +uuid:00000000-0000-0000-0000-000000000000;id=0000000 +myunittests +2012-08-22T16:48:10Z + + + myunittests + West US + 0000000000000000000000000000000000000000000= + Active + 2012-08-22T16:48:10.217Z + https://myunittests-sb.accesscontrol.windows.net/ + https://myunittests.servicebus.windows.net/ + 
Endpoint=sb://myunittests.servicebus.windows.net/;SharedSecretIssuer=owner;SharedSecretValue=0000000000000000000000000000000000000000000= + 00000000000000000000000000000000 + true + + + + ''' + xmldoc = minidom.parseString(xmlstr) + namespace = ServiceBusNamespace() + + mappings = ( + ('Name', 'name', None), + ('Region', 'region', None), + ('DefaultKey', 'default_key', None), + ('Status', 'status', None), + ('CreatedAt', 'created_at', None), + ('AcsManagementEndpoint', 'acs_management_endpoint', None), + ('ServiceBusEndpoint', 'servicebus_endpoint', None), + ('ConnectionString', 'connection_string', None), + ('SubscriptionId', 'subscription_id', None), + ('Enabled', 'enabled', _parse_bool), + ) + + for desc in _get_children_from_path(xmldoc, + 'entry', + 'content', + 'NamespaceDescription'): + for xml_name, field_name, conversion_func in mappings: + node_value = _get_first_child_node_value(desc, xml_name) + if node_value is not None: + if conversion_func is not None: + node_value = conversion_func(node_value) + setattr(namespace, field_name, node_value) + + return namespace + + @staticmethod + def xml_to_region(xmlstr): + '''Converts xml response to service bus region + + The xml format for region: + +uuid:157c311f-081f-4b4a-a0ba-a8f990ffd2a3;id=1756759 + +2013-04-10T18:25:29Z + + + East Asia + East Asia + + + + ''' + xmldoc = minidom.parseString(xmlstr) + region = ServiceBusRegion() + + for desc in _get_children_from_path(xmldoc, 'entry', 'content', + 'RegionCodeDescription'): + node_value = _get_first_child_node_value(desc, 'Code') + if node_value is not None: + region.code = node_value + node_value = _get_first_child_node_value(desc, 'FullName') + if node_value is not None: + region.fullname = node_value + + return region + + @staticmethod + def xml_to_namespace_availability(xmlstr): + '''Converts xml response to service bus namespace availability + + The xml format: + + + uuid:9fc7c652-1856-47ab-8d74-cd31502ea8e6;id=3683292 + + 2013-04-16T03:03:37Z + + + false + + + + ''' + xmldoc = minidom.parseString(xmlstr) + availability = AvailabilityResponse() + + for desc in _get_children_from_path(xmldoc, 'entry', 'content', + 'NamespaceAvailability'): + node_value = _get_first_child_node_value(desc, 'Result') + if node_value is not None: + availability.result = _parse_bool(node_value) + + return availability + + @staticmethod + def odata_converter(data, str_type): + ''' Convert odata type + http://www.odata.org/documentation/odata-version-2-0/overview#AbstractTypeSystem + To be completed + ''' + if not str_type: + return _str(data) + if str_type in ["Edm.Single", "Edm.Double"]: + return float(data) + elif "Edm.Int" in str_type: + return int(data) + else: + return _str(data) + + @staticmethod + def xml_to_metrics(xmlstr, object_type): + '''Converts xml response to service bus metrics objects + + The xml format for MetricProperties + + https://sbgm.windows.net/Metrics(\'listeners.active\') + + <updated>2014-10-09T11:56:50Z</updated> + <author> + <name/> + </author> + <content type="application/xml"> + <m:properties> + <d:Name>listeners.active</d:Name> + <d:PrimaryAggregation>Average</d:PrimaryAggregation> + <d:Unit>Count</d:Unit> + <d:DisplayName>Active listeners</d:DisplayName> + </m:properties> + </content> +</entry> + + The xml format for MetricValues + <entry> + <id>https://sbgm.windows.net/MetricValues(datetime\'2014-10-02T00:00:00Z\')</id> + <title/> + <updated>2014-10-09T18:38:28Z</updated> + <author> + <name/> + </author> + <content type="application/xml"> + <m:properties> + <d:Timestamp 
m:type="Edm.DateTime">2014-10-02T00:00:00Z</d:Timestamp> + <d:Min m:type="Edm.Int64">-118</d:Min> + <d:Max m:type="Edm.Int64">15</d:Max> + <d:Average m:type="Edm.Single">-78.44444</d:Average> + <d:Total m:type="Edm.Int64">0</d:Total> + </m:properties> + </content> + </entry> + ''' + + xmldoc = minidom.parseString(xmlstr) + return_obj = object_type() + + members = dict(vars(return_obj)) + + # Only one entry here + for xml_entry in _get_children_from_path(xmldoc, + 'entry'): + for node in _get_children_from_path(xml_entry, + 'content', + 'm:properties'): + for name in members: + xml_name = "d:" + _get_serialization_name(name) + children = _get_child_nodes(node, xml_name) + if not children: + continue + child = children[0] + node_type = child.getAttributeNS("http://schemas.microsoft.com/ado/2007/08/dataservices/metadata", 'type') + node_value = _ServiceBusManagementXmlSerializer.odata_converter(child.firstChild.nodeValue, node_type) + setattr(return_obj, name, node_value) + for name, value in _get_entry_properties_from_node(xml_entry, + include_id=True, + use_title_as_id=False).items(): + if name in members: + continue # Do not override if already members + setattr(return_obj, name, value) + return return_obj + +from azure.servicemanagement.servicemanagementservice import ( + ServiceManagementService) +from azure.servicemanagement.servicebusmanagementservice import ( + ServiceBusManagementService) +from azure.servicemanagement.websitemanagementservice import ( + WebsiteManagementService) diff --git a/awx/lib/site-packages/azure/servicemanagement/schedulermanagementservice.py b/awx/lib/site-packages/azure/servicemanagement/schedulermanagementservice.py new file mode 100644 index 0000000000..ee57b6f2ed --- /dev/null +++ b/awx/lib/site-packages/azure/servicemanagement/schedulermanagementservice.py @@ -0,0 +1,70 @@ +#------------------------------------------------------------------------- +# Copyright (c) Microsoft. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#-------------------------------------------------------------------------- +from azure import ( + MANAGEMENT_HOST, + _str + ) +from azure.servicemanagement import ( + CloudServices, + ) +from azure.servicemanagement.servicemanagementclient import ( + _ServiceManagementClient, + ) + +class SchedulerManagementService(_ServiceManagementClient): + ''' Note that this class is a preliminary work on Scheduler + management. Since it lack a lot a features, final version + can be slightly different from the current one. + ''' + + def __init__(self, subscription_id=None, cert_file=None, + host=MANAGEMENT_HOST, request_session=None): + ''' + Initializes the scheduler management service. + + subscription_id: Subscription to manage. + cert_file: + Path to .pem certificate file (httplib), or location of the + certificate in your Personal certificate store (winhttp) in the + CURRENT_USER\my\CertificateName format. + If a request_session is specified, then this is unused. + host: Live ServiceClient URL. Defaults to Azure public cloud. 
+ request_session: + Session object to use for http requests. If this is specified, it + replaces the default use of httplib or winhttp. Also, the cert_file + parameter is unused when a session is passed in. + The session object handles authentication, and as such can support + multiple types of authentication: .pem certificate, oauth. + For example, you can pass in a Session instance from the requests + library. To use .pem certificate authentication with requests + library, set the path to the .pem file on the session.cert + attribute. + ''' + super(SchedulerManagementService, self).__init__( + subscription_id, cert_file, host, request_session) + + #--Operations for scheduler ---------------------------------------- + def list_cloud_services(self): + ''' + List the cloud services for scheduling defined on the account. + ''' + return self._perform_get(self._get_list_cloud_services_path(), + CloudServices) + + + #--Helper functions -------------------------------------------------- + def _get_list_cloud_services_path(self): + return self._get_path('cloudservices', None) + diff --git a/awx/lib/site-packages/azure/servicemanagement/servicebusmanagementservice.py b/awx/lib/site-packages/azure/servicemanagement/servicebusmanagementservice.py index 51d734e367..6008e171be 100644 --- a/awx/lib/site-packages/azure/servicemanagement/servicebusmanagementservice.py +++ b/awx/lib/site-packages/azure/servicemanagement/servicebusmanagementservice.py @@ -1,113 +1,534 @@ -#------------------------------------------------------------------------- -# Copyright (c) Microsoft. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#-------------------------------------------------------------------------- -from azure import ( - MANAGEMENT_HOST, - _convert_response_to_feeds, - _str, - _validate_not_none, - ) -from azure.servicemanagement import ( - _ServiceBusManagementXmlSerializer, - ) -from azure.servicemanagement.servicemanagementclient import ( - _ServiceManagementClient, - ) - - -class ServiceBusManagementService(_ServiceManagementClient): - - def __init__(self, subscription_id=None, cert_file=None, - host=MANAGEMENT_HOST): - super(ServiceBusManagementService, self).__init__( - subscription_id, cert_file, host) - - #--Operations for service bus ---------------------------------------- - def get_regions(self): - ''' - Get list of available service bus regions. - ''' - response = self._perform_get( - self._get_path('services/serviceBus/Regions/', None), - None) - - return _convert_response_to_feeds( - response, - _ServiceBusManagementXmlSerializer.xml_to_region) - - def list_namespaces(self): - ''' - List the service bus namespaces defined on the account. - ''' - response = self._perform_get( - self._get_path('services/serviceBus/Namespaces/', None), - None) - - return _convert_response_to_feeds( - response, - _ServiceBusManagementXmlSerializer.xml_to_namespace) - - def get_namespace(self, name): - ''' - Get details about a specific namespace. - - name: Name of the service bus namespace. 
- ''' - response = self._perform_get( - self._get_path('services/serviceBus/Namespaces', name), - None) - - return _ServiceBusManagementXmlSerializer.xml_to_namespace( - response.body) - - def create_namespace(self, name, region): - ''' - Create a new service bus namespace. - - name: Name of the service bus namespace to create. - region: Region to create the namespace in. - ''' - _validate_not_none('name', name) - - return self._perform_put( - self._get_path('services/serviceBus/Namespaces', name), - _ServiceBusManagementXmlSerializer.namespace_to_xml(region)) - - def delete_namespace(self, name): - ''' - Delete a service bus namespace. - - name: Name of the service bus namespace to delete. - ''' - _validate_not_none('name', name) - - return self._perform_delete( - self._get_path('services/serviceBus/Namespaces', name), - None) - - def check_namespace_availability(self, name): - ''' - Checks to see if the specified service bus namespace is available, or - if it has already been taken. - - name: Name of the service bus namespace to validate. - ''' - _validate_not_none('name', name) - - response = self._perform_get( - self._get_path('services/serviceBus/CheckNamespaceAvailability', - None) + '/?namespace=' + _str(name), None) - - return _ServiceBusManagementXmlSerializer.xml_to_namespace_availability( - response.body) +#------------------------------------------------------------------------- +# Copyright (c) Microsoft. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#-------------------------------------------------------------------------- +from azure import ( + MANAGEMENT_HOST, + _convert_response_to_feeds, + _str, + _validate_not_none, + _convert_xml_to_windows_azure_object, +) +from azure.servicemanagement import ( + _ServiceBusManagementXmlSerializer, + QueueDescription, + TopicDescription, + NotificationHubDescription, + RelayDescription, + MetricProperties, + MetricValues, + MetricRollups, +) +from azure.servicemanagement.servicemanagementclient import ( + _ServiceManagementClient, +) + +from functools import partial + +X_MS_VERSION = '2012-03-01' + +class ServiceBusManagementService(_ServiceManagementClient): + + def __init__(self, subscription_id=None, cert_file=None, + host=MANAGEMENT_HOST, request_session=None): + ''' + Initializes the service bus management service. + + subscription_id: Subscription to manage. + cert_file: + Path to .pem certificate file (httplib), or location of the + certificate in your Personal certificate store (winhttp) in the + CURRENT_USER\my\CertificateName format. + If a request_session is specified, then this is unused. + host: Live ServiceClient URL. Defaults to Azure public cloud. + request_session: + Session object to use for http requests. If this is specified, it + replaces the default use of httplib or winhttp. Also, the cert_file + parameter is unused when a session is passed in. + The session object handles authentication, and as such can support + multiple types of authentication: .pem certificate, oauth. 
+ For example, you can pass in a Session instance from the requests + library. To use .pem certificate authentication with requests + library, set the path to the .pem file on the session.cert + attribute. + ''' + super(ServiceBusManagementService, self).__init__( + subscription_id, cert_file, host, request_session) + self.x_ms_version = X_MS_VERSION + + # Operations for service bus ---------------------------------------- + def get_regions(self): + ''' + Get list of available service bus regions. + ''' + response = self._perform_get( + self._get_path('services/serviceBus/Regions/', None), + None) + + return _convert_response_to_feeds( + response, + _ServiceBusManagementXmlSerializer.xml_to_region) + + def list_namespaces(self): + ''' + List the service bus namespaces defined on the account. + ''' + response = self._perform_get( + self._get_path('services/serviceBus/Namespaces/', None), + None) + + return _convert_response_to_feeds( + response, + _ServiceBusManagementXmlSerializer.xml_to_namespace) + + def get_namespace(self, name): + ''' + Get details about a specific namespace. + + name: Name of the service bus namespace. + ''' + response = self._perform_get( + self._get_path('services/serviceBus/Namespaces', name), + None) + + return _ServiceBusManagementXmlSerializer.xml_to_namespace( + response.body) + + def create_namespace(self, name, region): + ''' + Create a new service bus namespace. + + name: Name of the service bus namespace to create. + region: Region to create the namespace in. + ''' + _validate_not_none('name', name) + + return self._perform_put( + self._get_path('services/serviceBus/Namespaces', name), + _ServiceBusManagementXmlSerializer.namespace_to_xml(region)) + + def delete_namespace(self, name): + ''' + Delete a service bus namespace. + + name: Name of the service bus namespace to delete. + ''' + _validate_not_none('name', name) + + return self._perform_delete( + self._get_path('services/serviceBus/Namespaces', name), + None) + + def check_namespace_availability(self, name): + ''' + Checks to see if the specified service bus namespace is available, or + if it has already been taken. + + name: Name of the service bus namespace to validate. + ''' + _validate_not_none('name', name) + + response = self._perform_get( + self._get_path('services/serviceBus/CheckNamespaceAvailability', + None) + '/?namespace=' + _str(name), None) + + return _ServiceBusManagementXmlSerializer.xml_to_namespace_availability( + response.body) + + def list_queues(self, name): + ''' + Enumerates the queues in the service namespace. + + name: Name of the service bus namespace. + ''' + _validate_not_none('name', name) + + response = self._perform_get( + self._get_list_queues_path(name), + None) + + return _convert_response_to_feeds(response, + partial(_convert_xml_to_windows_azure_object, + azure_type=QueueDescription)) + + def list_topics(self, name): + ''' + Retrieves the topics in the service namespace. + + name: Name of the service bus namespace. + ''' + response = self._perform_get( + self._get_list_topics_path(name), + None) + + return _convert_response_to_feeds(response, + partial(_convert_xml_to_windows_azure_object, + azure_type=TopicDescription)) + + def list_notification_hubs(self, name): + ''' + Retrieves the notification hubs in the service namespace. + + name: Name of the service bus namespace. 
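+
+        Example (hypothetical namespace name; assumes a subscription id and
+        a management certificate in mycert.pem):
+            sbms = ServiceBusManagementService('<subscription-id>', 'mycert.pem')
+            for hub in sbms.list_notification_hubs('mynamespace'):
+                print(hub.registration_ttl)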
+        '''
+        response = self._perform_get(
+            self._get_list_notification_hubs_path(name),
+            None)
+
+        return _convert_response_to_feeds(response,
+                                          partial(_convert_xml_to_windows_azure_object,
+                                                  azure_type=NotificationHubDescription))
+
+    def list_relays(self, name):
+        '''
+        Retrieves the relays in the service namespace.
+
+        name: Name of the service bus namespace.
+        '''
+        response = self._perform_get(
+            self._get_list_relays_path(name),
+            None)
+
+        return _convert_response_to_feeds(response,
+                                          partial(_convert_xml_to_windows_azure_object,
+                                                  azure_type=RelayDescription))
+
+    def get_supported_metrics_queue(self, name, queue_name):
+        '''
+        Retrieves the list of supported metrics for this namespace and queue
+
+        name: Name of the service bus namespace.
+        queue_name: Name of the service bus queue in this namespace.
+        '''
+        response = self._perform_get(
+            self._get_get_supported_metrics_queue_path(name, queue_name),
+            None)
+
+        return _convert_response_to_feeds(response,
+                                          partial(_ServiceBusManagementXmlSerializer.xml_to_metrics,
+                                                  object_type=MetricProperties))
+
+    def get_supported_metrics_topic(self, name, topic_name):
+        '''
+        Retrieves the list of supported metrics for this namespace and topic
+
+        name: Name of the service bus namespace.
+        topic_name: Name of the service bus topic in this namespace.
+        '''
+        response = self._perform_get(
+            self._get_get_supported_metrics_topic_path(name, topic_name),
+            None)
+
+        return _convert_response_to_feeds(response,
+                                          partial(_ServiceBusManagementXmlSerializer.xml_to_metrics,
+                                                  object_type=MetricProperties))
+
+    def get_supported_metrics_notification_hub(self, name, hub_name):
+        '''
+        Retrieves the list of supported metrics for this namespace and
+        notification hub
+
+        name: Name of the service bus namespace.
+        hub_name: Name of the service bus notification hub in this namespace.
+        '''
+        response = self._perform_get(
+            self._get_get_supported_metrics_hub_path(name, hub_name),
+            None)
+
+        return _convert_response_to_feeds(response,
+                                          partial(_ServiceBusManagementXmlSerializer.xml_to_metrics,
+                                                  object_type=MetricProperties))
+
+    def get_supported_metrics_relay(self, name, relay_name):
+        '''
+        Retrieves the list of supported metrics for this namespace and relay
+
+        name: Name of the service bus namespace.
+        relay_name: Name of the service bus relay in this namespace.
+        '''
+        response = self._perform_get(
+            self._get_get_supported_metrics_relay_path(name, relay_name),
+            None)
+
+        return _convert_response_to_feeds(response,
+                                          partial(_ServiceBusManagementXmlSerializer.xml_to_metrics,
+                                                  object_type=MetricProperties))
+
+    def get_metrics_data_queue(self, name, queue_name, metric, rollup, filter_expression):
+        '''
+        Retrieves the metrics data for this namespace and queue
+
+        name: Name of the service bus namespace.
+        queue_name: Name of the service bus queue in this namespace.
+        metric: name of a supported metric
+        rollup: name of a supported rollup
+        filter_expression: filter, for instance "$filter=Timestamp gt datetime'2014-10-01T00:00:00Z'"
+        '''
+        response = self._perform_get(
+            self._get_get_metrics_data_queue_path(name, queue_name, metric, rollup, filter_expression),
+            None)
+
+        return _convert_response_to_feeds(response,
+                                          partial(_ServiceBusManagementXmlSerializer.xml_to_metrics,
+                                                  object_type=MetricValues))
+
+    def get_metrics_data_topic(self, name, topic_name, metric, rollup, filter_expression):
+        '''
+        Retrieves the metrics data for this namespace and topic
+
+        name: Name of the service bus namespace.
+        topic_name: Name of the service bus topic in this namespace.
+        metric: name of a supported metric
+        rollup: name of a supported rollup
+        filter_expression: filter, for instance "$filter=Timestamp gt datetime'2014-10-01T00:00:00Z'"
+        '''
+        response = self._perform_get(
+            self._get_get_metrics_data_topic_path(name, topic_name, metric, rollup, filter_expression),
+            None)
+
+        return _convert_response_to_feeds(response,
+                                          partial(_ServiceBusManagementXmlSerializer.xml_to_metrics,
+                                                  object_type=MetricValues))
+
+    def get_metrics_data_notification_hub(self, name, hub_name, metric, rollup, filter_expression):
+        '''
+        Retrieves the metrics data for this namespace and notification hub
+
+        name: Name of the service bus namespace.
+        hub_name: Name of the service bus notification hub in this namespace.
+        metric: name of a supported metric
+        rollup: name of a supported rollup
+        filter_expression: filter, for instance "$filter=Timestamp gt datetime'2014-10-01T00:00:00Z'"
+        '''
+        response = self._perform_get(
+            self._get_get_metrics_data_hub_path(name, hub_name, metric, rollup, filter_expression),
+            None)
+
+        return _convert_response_to_feeds(response,
+                                          partial(_ServiceBusManagementXmlSerializer.xml_to_metrics,
+                                                  object_type=MetricValues))
+
+    def get_metrics_data_relay(self, name, relay_name, metric, rollup, filter_expression):
+        '''
+        Retrieves the metrics data for this namespace and relay
+
+        name: Name of the service bus namespace.
+        relay_name: Name of the service bus relay in this namespace.
+        metric: name of a supported metric
+        rollup: name of a supported rollup
+        filter_expression: filter, for instance "$filter=Timestamp gt datetime'2014-10-01T00:00:00Z'"
+        '''
+        response = self._perform_get(
+            self._get_get_metrics_data_relay_path(name, relay_name, metric, rollup, filter_expression),
+            None)
+
+        return _convert_response_to_feeds(response,
+                                          partial(_ServiceBusManagementXmlSerializer.xml_to_metrics,
+                                                  object_type=MetricValues))
+
+    def get_metrics_rollups_queue(self, name, queue_name, metric):
+        '''
+        This operation gets rollup data for Service Bus metrics queue.
+        Rollup data includes the time granularity for the telemetry aggregation as well as
+        the retention settings for each time granularity.
+
+        name: Name of the service bus namespace.
+        queue_name: Name of the service bus queue in this namespace.
+        metric: name of a supported metric
+        '''
+        response = self._perform_get(
+            self._get_get_metrics_rollup_queue_path(name, queue_name, metric),
+            None)
+
+        return _convert_response_to_feeds(response,
+                                          partial(_ServiceBusManagementXmlSerializer.xml_to_metrics,
+                                                  object_type=MetricRollups))
+
+    def get_metrics_rollups_topic(self, name, topic_name, metric):
+        '''
+        This operation gets rollup data for Service Bus metrics topic.
+        Rollup data includes the time granularity for the telemetry aggregation as well as
+        the retention settings for each time granularity.
+
+        name: Name of the service bus namespace.
+        topic_name: Name of the service bus topic in this namespace.
+        metric: name of a supported metric
+        '''
+        response = self._perform_get(
+            self._get_get_metrics_rollup_topic_path(name, topic_name, metric),
+            None)
+
+        return _convert_response_to_feeds(response,
+                                          partial(_ServiceBusManagementXmlSerializer.xml_to_metrics,
+                                                  object_type=MetricRollups))
+
+    def get_metrics_rollups_notification_hub(self, name, hub_name, metric):
+        '''
+        This operation gets rollup data for Service Bus metrics notification hub.
+ Rollup data includes the time granularity for the telemetry aggregation as well as + the retention settings for each time granularity. + + name: Name of the service bus namespace. + hub_name: Name of the service bus notification hub in this namespace. + metric: name of a supported metric + ''' + response = self._perform_get( + self._get_get_metrics_rollup_hub_path(name, hub_name, metric), + None) + + return _convert_response_to_feeds(response, + partial(_ServiceBusManagementXmlSerializer.xml_to_metrics, + object_type=MetricRollups)) + + def get_metrics_rollups_relay(self, name, relay_name, metric): + ''' + This operation gets rollup data for Service Bus metrics relay. + Rollup data includes the time granularity for the telemetry aggregation as well as + the retention settings for each time granularity. + + name: Name of the service bus namespace. + relay_name: Name of the service bus relay in this namespace. + metric: name of a supported metric + ''' + response = self._perform_get( + self._get_get_metrics_rollup_relay_path(name, relay_name, metric), + None) + + return _convert_response_to_feeds(response, + partial(_ServiceBusManagementXmlSerializer.xml_to_metrics, + object_type=MetricRollups)) + + + # Helper functions -------------------------------------------------- + def _get_list_queues_path(self, namespace_name): + return self._get_path('services/serviceBus/Namespaces/', + namespace_name) + '/Queues' + + def _get_list_topics_path(self, namespace_name): + return self._get_path('services/serviceBus/Namespaces/', + namespace_name) + '/Topics' + + def _get_list_notification_hubs_path(self, namespace_name): + return self._get_path('services/serviceBus/Namespaces/', + namespace_name) + '/NotificationHubs' + + def _get_list_relays_path(self, namespace_name): + return self._get_path('services/serviceBus/Namespaces/', + namespace_name) + '/Relays' + + def _get_get_supported_metrics_queue_path(self, namespace_name, queue_name): + return self._get_path('services/serviceBus/Namespaces/', + namespace_name) + '/Queues/' + _str(queue_name) + '/Metrics' + + def _get_get_supported_metrics_topic_path(self, namespace_name, topic_name): + return self._get_path('services/serviceBus/Namespaces/', + namespace_name) + '/Topics/' + _str(topic_name) + '/Metrics' + + def _get_get_supported_metrics_hub_path(self, namespace_name, hub_name): + return self._get_path('services/serviceBus/Namespaces/', + namespace_name) + '/NotificationHubs/' + _str(hub_name) + '/Metrics' + + def _get_get_supported_metrics_relay_path(self, namespace_name, queue_name): + return self._get_path('services/serviceBus/Namespaces/', + namespace_name) + '/Relays/' + _str(queue_name) + '/Metrics' + + def _get_get_metrics_data_queue_path(self, namespace_name, queue_name, metric, rollup, filter_expr): + return "".join([ + self._get_path('services/serviceBus/Namespaces/', namespace_name), + '/Queues/', + _str(queue_name), + '/Metrics/', + _str(metric), + '/Rollups/', + _str(rollup), + '/Values?', + filter_expr + ]) + + def _get_get_metrics_data_topic_path(self, namespace_name, queue_name, metric, rollup, filter_expr): + return "".join([ + self._get_path('services/serviceBus/Namespaces/', namespace_name), + '/Topics/', + _str(queue_name), + '/Metrics/', + _str(metric), + '/Rollups/', + _str(rollup), + '/Values?', + filter_expr + ]) + + def _get_get_metrics_data_hub_path(self, namespace_name, queue_name, metric, rollup, filter_expr): + return "".join([ + self._get_path('services/serviceBus/Namespaces/', namespace_name), + '/NotificationHubs/', + 
_str(queue_name), + '/Metrics/', + _str(metric), + '/Rollups/', + _str(rollup), + '/Values?', + filter_expr + ]) + + def _get_get_metrics_data_relay_path(self, namespace_name, queue_name, metric, rollup, filter_expr): + return "".join([ + self._get_path('services/serviceBus/Namespaces/', namespace_name), + '/Relays/', + _str(queue_name), + '/Metrics/', + _str(metric), + '/Rollups/', + _str(rollup), + '/Values?', + filter_expr + ]) + + def _get_get_metrics_rollup_queue_path(self, namespace_name, queue_name, metric): + return "".join([ + self._get_path('services/serviceBus/Namespaces/', namespace_name), + '/Queues/', + _str(queue_name), + '/Metrics/', + _str(metric), + '/Rollups', + ]) + + def _get_get_metrics_rollup_topic_path(self, namespace_name, queue_name, metric): + return "".join([ + self._get_path('services/serviceBus/Namespaces/', namespace_name), + '/Topics/', + _str(queue_name), + '/Metrics/', + _str(metric), + '/Rollups', + ]) + + def _get_get_metrics_rollup_hub_path(self, namespace_name, queue_name, metric): + return "".join([ + self._get_path('services/serviceBus/Namespaces/', namespace_name), + '/NotificationHubs/', + _str(queue_name), + '/Metrics/', + _str(metric), + '/Rollups', + ]) + + def _get_get_metrics_rollup_relay_path(self, namespace_name, queue_name, metric): + return "".join([ + self._get_path('services/serviceBus/Namespaces/', namespace_name), + '/Relays/', + _str(queue_name), + '/Metrics/', + _str(metric), + '/Rollups', + ]) diff --git a/awx/lib/site-packages/azure/servicemanagement/servicemanagementclient.py b/awx/lib/site-packages/azure/servicemanagement/servicemanagementclient.py index 53ab03e508..808956e0bb 100644 --- a/awx/lib/site-packages/azure/servicemanagement/servicemanagementclient.py +++ b/awx/lib/site-packages/azure/servicemanagement/servicemanagementclient.py @@ -1,166 +1,258 @@ -#------------------------------------------------------------------------- -# Copyright (c) Microsoft. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#-------------------------------------------------------------------------- -import os - -from azure import ( - WindowsAzureError, - MANAGEMENT_HOST, - _get_request_body, - _parse_response, - _str, - _update_request_uri_query, - ) -from azure.http import ( - HTTPError, - HTTPRequest, - ) -from azure.http.httpclient import _HTTPClient -from azure.servicemanagement import ( - AZURE_MANAGEMENT_CERTFILE, - AZURE_MANAGEMENT_SUBSCRIPTIONID, - _management_error_handler, - _parse_response_for_async_op, - _update_management_header, - ) - - -class _ServiceManagementClient(object): - - def __init__(self, subscription_id=None, cert_file=None, - host=MANAGEMENT_HOST): - self.requestid = None - self.subscription_id = subscription_id - self.cert_file = cert_file - self.host = host - - if not self.cert_file: - if AZURE_MANAGEMENT_CERTFILE in os.environ: - self.cert_file = os.environ[AZURE_MANAGEMENT_CERTFILE] - - if not self.subscription_id: - if AZURE_MANAGEMENT_SUBSCRIPTIONID in os.environ: - self.subscription_id = os.environ[ - AZURE_MANAGEMENT_SUBSCRIPTIONID] - - if not self.cert_file or not self.subscription_id: - raise WindowsAzureError( - 'You need to provide subscription id and certificate file') - - self._httpclient = _HTTPClient( - service_instance=self, cert_file=self.cert_file) - self._filter = self._httpclient.perform_request - - def with_filter(self, filter): - '''Returns a new service which will process requests with the - specified filter. Filtering operations can include logging, automatic - retrying, etc... The filter is a lambda which receives the HTTPRequest - and another lambda. The filter can perform any pre-processing on the - request, pass it off to the next lambda, and then perform any - post-processing on the response.''' - res = type(self)(self.subscription_id, self.cert_file, self.host) - old_filter = self._filter - - def new_filter(request): - return filter(request, old_filter) - - res._filter = new_filter - return res - - def set_proxy(self, host, port, user=None, password=None): - ''' - Sets the proxy server host and port for the HTTP CONNECT Tunnelling. - - host: Address of the proxy. Ex: '192.168.0.100' - port: Port of the proxy. Ex: 6000 - user: User for proxy authorization. - password: Password for proxy authorization. 
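The filter contract described in with_filter above is unchanged between the old and new client: a filter is any callable receiving the HTTPRequest and the next filter in the chain. A minimal logging sketch, assuming sms is an already-constructed ServiceManagementService and that the response object exposes a status attribute, as elsewhere in this SDK:

# Hypothetical with_filter usage: log every request and response status.
def logging_filter(request, next_filter):
    print('>> %s %s' % (request.method, request.path))  # pre-processing
    response = next_filter(request)                     # hand off down the chain
    print('<< %s' % response.status)                    # post-processing
    return response

logged_sms = sms.with_filter(logging_filter)
# Calls made through logged_sms now print the method, path and status.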
- ''' - self._httpclient.set_proxy(host, port, user, password) - - #--Helper functions -------------------------------------------------- - def _perform_request(self, request): - try: - resp = self._filter(request) - except HTTPError as ex: - return _management_error_handler(ex) - - return resp - - def _perform_get(self, path, response_type): - request = HTTPRequest() - request.method = 'GET' - request.host = self.host - request.path = path - request.path, request.query = _update_request_uri_query(request) - request.headers = _update_management_header(request) - response = self._perform_request(request) - - if response_type is not None: - return _parse_response(response, response_type) - - return response - - def _perform_put(self, path, body, async=False): - request = HTTPRequest() - request.method = 'PUT' - request.host = self.host - request.path = path - request.body = _get_request_body(body) - request.path, request.query = _update_request_uri_query(request) - request.headers = _update_management_header(request) - response = self._perform_request(request) - - if async: - return _parse_response_for_async_op(response) - - return None - - def _perform_post(self, path, body, response_type=None, async=False): - request = HTTPRequest() - request.method = 'POST' - request.host = self.host - request.path = path - request.body = _get_request_body(body) - request.path, request.query = _update_request_uri_query(request) - request.headers = _update_management_header(request) - response = self._perform_request(request) - - if response_type is not None: - return _parse_response(response, response_type) - - if async: - return _parse_response_for_async_op(response) - - return None - - def _perform_delete(self, path, async=False): - request = HTTPRequest() - request.method = 'DELETE' - request.host = self.host - request.path = path - request.path, request.query = _update_request_uri_query(request) - request.headers = _update_management_header(request) - response = self._perform_request(request) - - if async: - return _parse_response_for_async_op(response) - - return None - - def _get_path(self, resource, name): - path = '/' + self.subscription_id + '/' + resource - if name is not None: - path += '/' + _str(name) - return path +#------------------------------------------------------------------------- +# Copyright (c) Microsoft. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#-------------------------------------------------------------------------- +import os + +from azure import ( + WindowsAzureError, + MANAGEMENT_HOST, + _get_request_body, + _parse_response, + _str, + _update_request_uri_query, + ) +from azure.http import ( + HTTPError, + HTTPRequest, + ) +from azure.http.httpclient import _HTTPClient +from azure.servicemanagement import ( + AZURE_MANAGEMENT_CERTFILE, + AZURE_MANAGEMENT_SUBSCRIPTIONID, + _management_error_handler, + parse_response_for_async_op, + X_MS_VERSION, + ) + + +class _ServiceManagementClient(object): + + def __init__(self, subscription_id=None, cert_file=None, + host=MANAGEMENT_HOST, request_session=None): + self.requestid = None + self.subscription_id = subscription_id + self.cert_file = cert_file + self.host = host + self.request_session = request_session + self.x_ms_version = X_MS_VERSION + self.content_type = 'application/atom+xml;type=entry;charset=utf-8' + + if not self.cert_file and not request_session: + if AZURE_MANAGEMENT_CERTFILE in os.environ: + self.cert_file = os.environ[AZURE_MANAGEMENT_CERTFILE] + + if not self.subscription_id: + if AZURE_MANAGEMENT_SUBSCRIPTIONID in os.environ: + self.subscription_id = os.environ[ + AZURE_MANAGEMENT_SUBSCRIPTIONID] + + if not self.request_session: + if not self.cert_file or not self.subscription_id: + raise WindowsAzureError( + 'You need to provide subscription id and certificate file') + + self._httpclient = _HTTPClient( + service_instance=self, cert_file=self.cert_file, + request_session=self.request_session) + self._filter = self._httpclient.perform_request + + def with_filter(self, filter): + '''Returns a new service which will process requests with the + specified filter. Filtering operations can include logging, automatic + retrying, etc... The filter is a lambda which receives the HTTPRequest + and another lambda. The filter can perform any pre-processing on the + request, pass it off to the next lambda, and then perform any + post-processing on the response.''' + res = type(self)(self.subscription_id, self.cert_file, self.host, + self.request_session) + old_filter = self._filter + + def new_filter(request): + return filter(request, old_filter) + + res._filter = new_filter + return res + + def set_proxy(self, host, port, user=None, password=None): + ''' + Sets the proxy server host and port for the HTTP CONNECT Tunnelling. + + host: Address of the proxy. Ex: '192.168.0.100' + port: Port of the proxy. Ex: 6000 + user: User for proxy authorization. + password: Password for proxy authorization. + ''' + self._httpclient.set_proxy(host, port, user, password) + + def perform_get(self, path, x_ms_version=None): + ''' + Performs a GET request and returns the response. + + path: + Path to the resource. + Ex: '/<subscription-id>/services/hostedservices/<service-name>' + x_ms_version: + If specified, this is used for the x-ms-version header. + Otherwise, self.x_ms_version is used. + ''' + request = HTTPRequest() + request.method = 'GET' + request.host = self.host + request.path = path + request.path, request.query = _update_request_uri_query(request) + request.headers = self._update_management_header(request, x_ms_version) + response = self._perform_request(request) + + return response + + def perform_put(self, path, body, x_ms_version=None): + ''' + Performs a PUT request and returns the response. + + path: + Path to the resource. + Ex: '/<subscription-id>/services/hostedservices/<service-name>' + body: + Body for the PUT request. 
+ x_ms_version: + If specified, this is used for the x-ms-version header. + Otherwise, self.x_ms_version is used. + ''' + request = HTTPRequest() + request.method = 'PUT' + request.host = self.host + request.path = path + request.body = _get_request_body(body) + request.path, request.query = _update_request_uri_query(request) + request.headers = self._update_management_header(request, x_ms_version) + response = self._perform_request(request) + + return response + + def perform_post(self, path, body, x_ms_version=None): + ''' + Performs a POST request and returns the response. + + path: + Path to the resource. + Ex: '/<subscription-id>/services/hostedservices/<service-name>' + body: + Body for the POST request. + x_ms_version: + If specified, this is used for the x-ms-version header. + Otherwise, self.x_ms_version is used. + ''' + request = HTTPRequest() + request.method = 'POST' + request.host = self.host + request.path = path + request.body = _get_request_body(body) + request.path, request.query = _update_request_uri_query(request) + request.headers = self._update_management_header(request, x_ms_version) + response = self._perform_request(request) + + return response + + def perform_delete(self, path, x_ms_version=None): + ''' + Performs a DELETE request and returns the response. + + path: + Path to the resource. + Ex: '/<subscription-id>/services/hostedservices/<service-name>' + x_ms_version: + If specified, this is used for the x-ms-version header. + Otherwise, self.x_ms_version is used. + ''' + request = HTTPRequest() + request.method = 'DELETE' + request.host = self.host + request.path = path + request.path, request.query = _update_request_uri_query(request) + request.headers = self._update_management_header(request, x_ms_version) + response = self._perform_request(request) + + return response + + #--Helper functions -------------------------------------------------- + def _perform_request(self, request): + try: + resp = self._filter(request) + except HTTPError as ex: + return _management_error_handler(ex) + + return resp + + def _update_management_header(self, request, x_ms_version): + ''' Add additional headers for management. ''' + + if request.method in ['PUT', 'POST', 'MERGE', 'DELETE']: + request.headers.append(('Content-Length', str(len(request.body)))) + + # append additional headers base on the service + request.headers.append(('x-ms-version', x_ms_version or self.x_ms_version)) + + # if it is not GET or HEAD request, must set content-type. 
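+ # (The for/else just below uses Python's loop-else idiom: the else branch
+ # runs only when the loop finishes without hitting 'break', i.e. when no
+ # caller-supplied header already provides a Content-Type.)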
+ if not request.method in ['GET', 'HEAD']: + for name, _ in request.headers: + if 'content-type' == name.lower(): + break + else: + request.headers.append( + ('Content-Type', + self.content_type)) + + return request.headers + + def _perform_get(self, path, response_type, x_ms_version=None): + response = self.perform_get(path, x_ms_version) + + if response_type is not None: + return _parse_response(response, response_type) + + return response + + def _perform_put(self, path, body, async=False, x_ms_version=None): + response = self.perform_put(path, body, x_ms_version) + + if async: + return parse_response_for_async_op(response) + + return None + + def _perform_post(self, path, body, response_type=None, async=False, + x_ms_version=None): + response = self.perform_post(path, body, x_ms_version) + + if response_type is not None: + return _parse_response(response, response_type) + + if async: + return parse_response_for_async_op(response) + + return None + + def _perform_delete(self, path, async=False, x_ms_version=None): + response = self.perform_delete(path, x_ms_version) + + if async: + return parse_response_for_async_op(response) + + return None + + def _get_path(self, resource, name): + path = '/' + self.subscription_id + '/' + resource + if name is not None: + path += '/' + _str(name) + return path diff --git a/awx/lib/site-packages/azure/servicemanagement/servicemanagementservice.py b/awx/lib/site-packages/azure/servicemanagement/servicemanagementservice.py index 13fcf3d76b..651ffdae53 100644 --- a/awx/lib/site-packages/azure/servicemanagement/servicemanagementservice.py +++ b/awx/lib/site-packages/azure/servicemanagement/servicemanagementservice.py @@ -1,1754 +1,2300 @@ -#------------------------------------------------------------------------- -# Copyright (c) Microsoft. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#-------------------------------------------------------------------------- -from azure import ( - WindowsAzureError, - MANAGEMENT_HOST, - _str, - _validate_not_none, - ) -from azure.servicemanagement import ( - AffinityGroups, - AffinityGroup, - AvailabilityResponse, - Certificate, - Certificates, - DataVirtualHardDisk, - Deployment, - Disk, - Disks, - Locations, - Operation, - HostedService, - HostedServices, - Images, - OperatingSystems, - OperatingSystemFamilies, - OSImage, - PersistentVMRole, - StorageService, - StorageServices, - Subscription, - SubscriptionCertificate, - SubscriptionCertificates, - VirtualNetworkSites, - _XmlSerializer, - ) -from azure.servicemanagement.servicemanagementclient import ( - _ServiceManagementClient, - ) - -class ServiceManagementService(_ServiceManagementClient): - - def __init__(self, subscription_id=None, cert_file=None, - host=MANAGEMENT_HOST): - super(ServiceManagementService, self).__init__( - subscription_id, cert_file, host) - - #--Operations for storage accounts ----------------------------------- - def list_storage_accounts(self): - ''' - Lists the storage accounts available under the current subscription. 
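Two client-level changes come together here: the constructor's new request_session argument (a pre-configured session object, for example from the requests library, carrying the management certificate) and the public perform_get/perform_put/perform_post/perform_delete methods, which expose raw management calls with an optional x-ms-version override. A combined sketch under those assumptions; the paths, ids and version string below are placeholders:

import requests
from azure.servicemanagement import ServiceManagementService

# Hypothetical: authenticate via a requests.Session holding a client
# certificate instead of a cert_file path.
session = requests.Session()
session.cert = '/path/to/management_cert.pem'
sms = ServiceManagementService(subscription_id='<subscription-id>',
                               request_session=session)

# Raw management call through the new public surface; the body comes back
# as undecoded XML, with no model deserialization applied.
response = sms.perform_get(
    '/<subscription-id>/services/hostedservices/<service-name>',
    x_ms_version='2013-06-01')  # illustrative version override
print(response.status)
print(response.body)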
- ''' - return self._perform_get(self._get_storage_service_path(), - StorageServices) - - def get_storage_account_properties(self, service_name): - ''' - Returns system properties for the specified storage account. - - service_name: Name of the storage service account. - ''' - _validate_not_none('service_name', service_name) - return self._perform_get(self._get_storage_service_path(service_name), - StorageService) - - def get_storage_account_keys(self, service_name): - ''' - Returns the primary and secondary access keys for the specified - storage account. - - service_name: Name of the storage service account. - ''' - _validate_not_none('service_name', service_name) - return self._perform_get( - self._get_storage_service_path(service_name) + '/keys', - StorageService) - - def regenerate_storage_account_keys(self, service_name, key_type): - ''' - Regenerates the primary or secondary access key for the specified - storage account. - - service_name: Name of the storage service account. - key_type: - Specifies which key to regenerate. Valid values are: - Primary, Secondary - ''' - _validate_not_none('service_name', service_name) - _validate_not_none('key_type', key_type) - return self._perform_post( - self._get_storage_service_path( - service_name) + '/keys?action=regenerate', - _XmlSerializer.regenerate_keys_to_xml( - key_type), - StorageService) - - def create_storage_account(self, service_name, description, label, - affinity_group=None, location=None, - geo_replication_enabled=True, - extended_properties=None): - ''' - Creates a new storage account in Windows Azure. - - service_name: - A name for the storage account that is unique within Windows Azure. - Storage account names must be between 3 and 24 characters in length - and use numbers and lower-case letters only. - description: - A description for the storage account. The description may be up - to 1024 characters in length. - label: - A name for the storage account. The name may be up to 100 - characters in length. The name can be used to identify the storage - account for your tracking purposes. - affinity_group: - The name of an existing affinity group in the specified - subscription. You can specify either a location or affinity_group, - but not both. - location: - The location where the storage account is created. You can specify - either a location or affinity_group, but not both. - geo_replication_enabled: - Specifies whether the storage account is created with the - geo-replication enabled. If the element is not included in the - request body, the default value is true. If set to true, the data - in the storage account is replicated across more than one - geographic location so as to enable resilience in the face of - catastrophic service loss. - extended_properties: - Dictionary containing name/value pairs of storage account - properties. You can have a maximum of 50 extended property - name/value pairs. The maximum length of the Name element is 64 - characters, only alphanumeric characters and underscores are valid - in the Name, and the name must start with a letter. The value has - a maximum length of 255 characters. 
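create_storage_account above enforces that exactly one of location or affinity_group is supplied and runs asynchronously; a minimal sketch with placeholder values, again assuming a constructed client sms:

# Hypothetical call: exactly one of location or affinity_group may be
# given, and the operation completes asynchronously.
result = sms.create_storage_account(
    service_name='mystorageacct01',   # 3-24 chars, lowercase letters/digits
    description='Example storage account',
    label='mystorageacct01',
    location='West US')               # or affinity_group=..., never both
# result holds the request id of the asynchronous operation.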
- ''' - _validate_not_none('service_name', service_name) - _validate_not_none('description', description) - _validate_not_none('label', label) - if affinity_group is None and location is None: - raise WindowsAzureError( - 'location or affinity_group must be specified') - if affinity_group is not None and location is not None: - raise WindowsAzureError( - 'Only one of location or affinity_group needs to be specified') - return self._perform_post( - self._get_storage_service_path(), - _XmlSerializer.create_storage_service_input_to_xml( - service_name, - description, - label, - affinity_group, - location, - geo_replication_enabled, - extended_properties), - async=True) - - def update_storage_account(self, service_name, description=None, - label=None, geo_replication_enabled=None, - extended_properties=None): - ''' - Updates the label, the description, and enables or disables the - geo-replication status for a storage account in Windows Azure. - - service_name: Name of the storage service account. - description: - A description for the storage account. The description may be up - to 1024 characters in length. - label: - A name for the storage account. The name may be up to 100 - characters in length. The name can be used to identify the storage - account for your tracking purposes. - geo_replication_enabled: - Specifies whether the storage account is created with the - geo-replication enabled. If the element is not included in the - request body, the default value is true. If set to true, the data - in the storage account is replicated across more than one - geographic location so as to enable resilience in the face of - catastrophic service loss. - extended_properties: - Dictionary containing name/value pairs of storage account - properties. You can have a maximum of 50 extended property - name/value pairs. The maximum length of the Name element is 64 - characters, only alphanumeric characters and underscores are valid - in the Name, and the name must start with a letter. The value has - a maximum length of 255 characters. - ''' - _validate_not_none('service_name', service_name) - return self._perform_put( - self._get_storage_service_path(service_name), - _XmlSerializer.update_storage_service_input_to_xml( - description, - label, - geo_replication_enabled, - extended_properties)) - - def delete_storage_account(self, service_name): - ''' - Deletes the specified storage account from Windows Azure. - - service_name: Name of the storage service account. - ''' - _validate_not_none('service_name', service_name) - return self._perform_delete( - self._get_storage_service_path(service_name)) - - def check_storage_account_name_availability(self, service_name): - ''' - Checks to see if the specified storage account name is available, or - if it has already been taken. - - service_name: Name of the storage service account. - ''' - _validate_not_none('service_name', service_name) - return self._perform_get( - self._get_storage_service_path() + - '/operations/isavailable/' + - _str(service_name) + '', - AvailabilityResponse) - - #--Operations for hosted services ------------------------------------ - def list_hosted_services(self): - ''' - Lists the hosted services available under the current subscription. - ''' - return self._perform_get(self._get_hosted_service_path(), - HostedServices) - - def get_hosted_service_properties(self, service_name, embed_detail=False): - ''' - Retrieves system properties for the specified hosted service. 
These - properties include the service name and service type; the name of the - affinity group to which the service belongs, or its location if it is - not part of an affinity group; and optionally, information on the - service's deployments. - - service_name: Name of the hosted service. - embed_detail: - When True, the management service returns properties for all - deployments of the service, as well as for the service itself. - ''' - _validate_not_none('service_name', service_name) - _validate_not_none('embed_detail', embed_detail) - return self._perform_get( - self._get_hosted_service_path(service_name) + - '?embed-detail=' + - _str(embed_detail).lower(), - HostedService) - - def create_hosted_service(self, service_name, label, description=None, - location=None, affinity_group=None, - extended_properties=None): - ''' - Creates a new hosted service in Windows Azure. - - service_name: - A name for the hosted service that is unique within Windows Azure. - This name is the DNS prefix name and can be used to access the - hosted service. - label: - A name for the hosted service. The name can be up to 100 characters - in length. The name can be used to identify the storage account for - your tracking purposes. - description: - A description for the hosted service. The description can be up to - 1024 characters in length. - location: - The location where the hosted service will be created. You can - specify either a location or affinity_group, but not both. - affinity_group: - The name of an existing affinity group associated with this - subscription. This name is a GUID and can be retrieved by examining - the name element of the response body returned by - list_affinity_groups. You can specify either a location or - affinity_group, but not both. - extended_properties: - Dictionary containing name/value pairs of storage account - properties. You can have a maximum of 50 extended property - name/value pairs. The maximum length of the Name element is 64 - characters, only alphanumeric characters and underscores are valid - in the Name, and the name must start with a letter. The value has - a maximum length of 255 characters. - ''' - _validate_not_none('service_name', service_name) - _validate_not_none('label', label) - if affinity_group is None and location is None: - raise WindowsAzureError( - 'location or affinity_group must be specified') - if affinity_group is not None and location is not None: - raise WindowsAzureError( - 'Only one of location or affinity_group needs to be specified') - return self._perform_post(self._get_hosted_service_path(), - _XmlSerializer.create_hosted_service_to_xml( - service_name, - label, - description, - location, - affinity_group, - extended_properties)) - - def update_hosted_service(self, service_name, label=None, description=None, - extended_properties=None): - ''' - Updates the label and/or the description for a hosted service in - Windows Azure. - - service_name: Name of the hosted service. - label: - A name for the hosted service. The name may be up to 100 characters - in length. You must specify a value for either Label or - Description, or for both. It is recommended that the label be - unique within the subscription. The name can be used - identify the hosted service for your tracking purposes. - description: - A description for the hosted service. The description may be up to - 1024 characters in length. You must specify a value for either - Label or Description, or for both. 
- extended_properties: - Dictionary containing name/value pairs of storage account - properties. You can have a maximum of 50 extended property - name/value pairs. The maximum length of the Name element is 64 - characters, only alphanumeric characters and underscores are valid - in the Name, and the name must start with a letter. The value has - a maximum length of 255 characters. - ''' - _validate_not_none('service_name', service_name) - return self._perform_put(self._get_hosted_service_path(service_name), - _XmlSerializer.update_hosted_service_to_xml( - label, - description, - extended_properties)) - - def delete_hosted_service(self, service_name): - ''' - Deletes the specified hosted service from Windows Azure. - - service_name: Name of the hosted service. - ''' - _validate_not_none('service_name', service_name) - return self._perform_delete(self._get_hosted_service_path(service_name)) - - def get_deployment_by_slot(self, service_name, deployment_slot): - ''' - Returns configuration information, status, and system properties for - a deployment. - - service_name: Name of the hosted service. - deployment_slot: - The environment to which the hosted service is deployed. Valid - values are: staging, production - ''' - _validate_not_none('service_name', service_name) - _validate_not_none('deployment_slot', deployment_slot) - return self._perform_get( - self._get_deployment_path_using_slot( - service_name, deployment_slot), - Deployment) - - def get_deployment_by_name(self, service_name, deployment_name): - ''' - Returns configuration information, status, and system properties for a - deployment. - - service_name: Name of the hosted service. - deployment_name: The name of the deployment. - ''' - _validate_not_none('service_name', service_name) - _validate_not_none('deployment_name', deployment_name) - return self._perform_get( - self._get_deployment_path_using_name( - service_name, deployment_name), - Deployment) - - def create_deployment(self, service_name, deployment_slot, name, - package_url, label, configuration, - start_deployment=False, - treat_warnings_as_error=False, - extended_properties=None): - ''' - Uploads a new service package and creates a new deployment on staging - or production. - - service_name: Name of the hosted service. - deployment_slot: - The environment to which the hosted service is deployed. Valid - values are: staging, production - name: - The name for the deployment. The deployment name must be unique - among other deployments for the hosted service. - package_url: - A URL that refers to the location of the service package in the - Blob service. The service package can be located either in a - storage account beneath the same subscription or a Shared Access - Signature (SAS) URI from any storage account. - label: - A name for the hosted service. The name can be up to 100 characters - in length. It is recommended that the label be unique within the - subscription. The name can be used to identify the hosted service - for your tracking purposes. - configuration: - The base-64 encoded service configuration file for the deployment. - start_deployment: - Indicates whether to start the deployment immediately after it is - created. If false, the service model is still deployed to the - virtual machines but the code is not run immediately. Instead, the - service is Suspended until you call Update Deployment Status and - set the status to Running, at which time the service will be - started. A deployed service still incurs charges, even if it is - suspended. 
- treat_warnings_as_error: - Indicates whether to treat package validation warnings as errors. - If set to true, the Created Deployment operation fails if there - are validation warnings on the service package. - extended_properties: - Dictionary containing name/value pairs of storage account - properties. You can have a maximum of 50 extended property - name/value pairs. The maximum length of the Name element is 64 - characters, only alphanumeric characters and underscores are valid - in the Name, and the name must start with a letter. The value has - a maximum length of 255 characters. - ''' - _validate_not_none('service_name', service_name) - _validate_not_none('deployment_slot', deployment_slot) - _validate_not_none('name', name) - _validate_not_none('package_url', package_url) - _validate_not_none('label', label) - _validate_not_none('configuration', configuration) - return self._perform_post( - self._get_deployment_path_using_slot( - service_name, deployment_slot), - _XmlSerializer.create_deployment_to_xml( - name, - package_url, - label, - configuration, - start_deployment, - treat_warnings_as_error, - extended_properties), - async=True) - - def delete_deployment(self, service_name, deployment_name): - ''' - Deletes the specified deployment. - - service_name: Name of the hosted service. - deployment_name: The name of the deployment. - ''' - _validate_not_none('service_name', service_name) - _validate_not_none('deployment_name', deployment_name) - return self._perform_delete( - self._get_deployment_path_using_name( - service_name, deployment_name), - async=True) - - def swap_deployment(self, service_name, production, source_deployment): - ''' - Initiates a virtual IP swap between the staging and production - deployment environments for a service. If the service is currently - running in the staging environment, it will be swapped to the - production environment. If it is running in the production - environment, it will be swapped to staging. - - service_name: Name of the hosted service. - production: The name of the production deployment. - source_deployment: The name of the source deployment. - ''' - _validate_not_none('service_name', service_name) - _validate_not_none('production', production) - _validate_not_none('source_deployment', source_deployment) - return self._perform_post(self._get_hosted_service_path(service_name), - _XmlSerializer.swap_deployment_to_xml( - production, source_deployment), - async=True) - - def change_deployment_configuration(self, service_name, deployment_name, - configuration, - treat_warnings_as_error=False, - mode='Auto', extended_properties=None): - ''' - Initiates a change to the deployment configuration. - - service_name: Name of the hosted service. - deployment_name: The name of the deployment. - configuration: - The base-64 encoded service configuration file for the deployment. - treat_warnings_as_error: - Indicates whether to treat package validation warnings as errors. - If set to true, the Created Deployment operation fails if there - are validation warnings on the service package. - mode: - If set to Manual, WalkUpgradeDomain must be called to apply the - update. If set to Auto, the Windows Azure platform will - automatically apply the update To each upgrade domain for the - service. Possible values are: Auto, Manual - extended_properties: - Dictionary containing name/value pairs of storage account - properties. You can have a maximum of 50 extended property - name/value pairs. 
The maximum length of the Name element is 64 - characters, only alphanumeric characters and underscores are valid - in the Name, and the name must start with a letter. The value has - a maximum length of 255 characters. - ''' - _validate_not_none('service_name', service_name) - _validate_not_none('deployment_name', deployment_name) - _validate_not_none('configuration', configuration) - return self._perform_post( - self._get_deployment_path_using_name( - service_name, deployment_name) + '/?comp=config', - _XmlSerializer.change_deployment_to_xml( - configuration, - treat_warnings_as_error, - mode, - extended_properties), - async=True) - - def update_deployment_status(self, service_name, deployment_name, status): - ''' - Initiates a change in deployment status. - - service_name: Name of the hosted service. - deployment_name: The name of the deployment. - status: - The change to initiate to the deployment status. Possible values - include: Running, Suspended - ''' - _validate_not_none('service_name', service_name) - _validate_not_none('deployment_name', deployment_name) - _validate_not_none('status', status) - return self._perform_post( - self._get_deployment_path_using_name( - service_name, deployment_name) + '/?comp=status', - _XmlSerializer.update_deployment_status_to_xml( - status), - async=True) - - def upgrade_deployment(self, service_name, deployment_name, mode, - package_url, configuration, label, force, - role_to_upgrade=None, extended_properties=None): - ''' - Initiates an upgrade. - - service_name: Name of the hosted service. - deployment_name: The name of the deployment. - mode: - If set to Manual, WalkUpgradeDomain must be called to apply the - update. If set to Auto, the Windows Azure platform will - automatically apply the update To each upgrade domain for the - service. Possible values are: Auto, Manual - package_url: - A URL that refers to the location of the service package in the - Blob service. The service package can be located either in a - storage account beneath the same subscription or a Shared Access - Signature (SAS) URI from any storage account. - configuration: - The base-64 encoded service configuration file for the deployment. - label: - A name for the hosted service. The name can be up to 100 characters - in length. It is recommended that the label be unique within the - subscription. The name can be used to identify the hosted service - for your tracking purposes. - force: - Specifies whether the rollback should proceed even when it will - cause local data to be lost from some role instances. True if the - rollback should proceed; otherwise false if the rollback should - fail. - role_to_upgrade: The name of the specific role to upgrade. - extended_properties: - Dictionary containing name/value pairs of storage account - properties. You can have a maximum of 50 extended property - name/value pairs. The maximum length of the Name element is 64 - characters, only alphanumeric characters and underscores are valid - in the Name, and the name must start with a letter. The value has - a maximum length of 255 characters. 
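update_deployment_status, shown earlier in this hunk, is the counterpart to creating a deployment with start_deployment=False; a sketch of toggling a deployment between the two documented states, with placeholder service and deployment names:

# Hypothetical: start a deployment that was created suspended, then
# suspend it again (a suspended deployment still incurs charges).
sms.update_deployment_status('myservice', 'mydeployment', 'Running')
sms.update_deployment_status('myservice', 'mydeployment', 'Suspended')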
- ''' - _validate_not_none('service_name', service_name) - _validate_not_none('deployment_name', deployment_name) - _validate_not_none('mode', mode) - _validate_not_none('package_url', package_url) - _validate_not_none('configuration', configuration) - _validate_not_none('label', label) - _validate_not_none('force', force) - return self._perform_post( - self._get_deployment_path_using_name( - service_name, deployment_name) + '/?comp=upgrade', - _XmlSerializer.upgrade_deployment_to_xml( - mode, - package_url, - configuration, - label, - role_to_upgrade, - force, - extended_properties), - async=True) - - def walk_upgrade_domain(self, service_name, deployment_name, - upgrade_domain): - ''' - Specifies the next upgrade domain to be walked during manual in-place - upgrade or configuration change. - - service_name: Name of the hosted service. - deployment_name: The name of the deployment. - upgrade_domain: - An integer value that identifies the upgrade domain to walk. - Upgrade domains are identified with a zero-based index: the first - upgrade domain has an ID of 0, the second has an ID of 1, and so on. - ''' - _validate_not_none('service_name', service_name) - _validate_not_none('deployment_name', deployment_name) - _validate_not_none('upgrade_domain', upgrade_domain) - return self._perform_post( - self._get_deployment_path_using_name( - service_name, deployment_name) + '/?comp=walkupgradedomain', - _XmlSerializer.walk_upgrade_domain_to_xml( - upgrade_domain), - async=True) - - def rollback_update_or_upgrade(self, service_name, deployment_name, mode, - force): - ''' - Cancels an in progress configuration change (update) or upgrade and - returns the deployment to its state before the upgrade or - configuration change was started. - - service_name: Name of the hosted service. - deployment_name: The name of the deployment. - mode: - Specifies whether the rollback should proceed automatically. - auto - The rollback proceeds without further user input. - manual - You must call the Walk Upgrade Domain operation to - apply the rollback to each upgrade domain. - force: - Specifies whether the rollback should proceed even when it will - cause local data to be lost from some role instances. True if the - rollback should proceed; otherwise false if the rollback should - fail. - ''' - _validate_not_none('service_name', service_name) - _validate_not_none('deployment_name', deployment_name) - _validate_not_none('mode', mode) - _validate_not_none('force', force) - return self._perform_post( - self._get_deployment_path_using_name( - service_name, deployment_name) + '/?comp=rollback', - _XmlSerializer.rollback_upgrade_to_xml( - mode, force), - async=True) - - def reboot_role_instance(self, service_name, deployment_name, - role_instance_name): - ''' - Requests a reboot of a role instance that is running in a deployment. - - service_name: Name of the hosted service. - deployment_name: The name of the deployment. - role_instance_name: The name of the role instance. - ''' - _validate_not_none('service_name', service_name) - _validate_not_none('deployment_name', deployment_name) - _validate_not_none('role_instance_name', role_instance_name) - return self._perform_post( - self._get_deployment_path_using_name( - service_name, deployment_name) + \ - '/roleinstances/' + _str(role_instance_name) + \ - '?comp=reboot', - '', - async=True) - - def reimage_role_instance(self, service_name, deployment_name, - role_instance_name): - ''' - Requests a reimage of a role instance that is running in a deployment. 
- - service_name: Name of the hosted service. - deployment_name: The name of the deployment. - role_instance_name: The name of the role instance. - ''' - _validate_not_none('service_name', service_name) - _validate_not_none('deployment_name', deployment_name) - _validate_not_none('role_instance_name', role_instance_name) - return self._perform_post( - self._get_deployment_path_using_name( - service_name, deployment_name) + \ - '/roleinstances/' + _str(role_instance_name) + \ - '?comp=reimage', - '', - async=True) - - def check_hosted_service_name_availability(self, service_name): - ''' - Checks to see if the specified hosted service name is available, or if - it has already been taken. - - service_name: Name of the hosted service. - ''' - _validate_not_none('service_name', service_name) - return self._perform_get( - '/' + self.subscription_id + - '/services/hostedservices/operations/isavailable/' + - _str(service_name) + '', - AvailabilityResponse) - - #--Operations for service certificates ------------------------------- - def list_service_certificates(self, service_name): - ''' - Lists all of the service certificates associated with the specified - hosted service. - - service_name: Name of the hosted service. - ''' - _validate_not_none('service_name', service_name) - return self._perform_get( - '/' + self.subscription_id + '/services/hostedservices/' + - _str(service_name) + '/certificates', - Certificates) - - def get_service_certificate(self, service_name, thumbalgorithm, thumbprint): - ''' - Returns the public data for the specified X.509 certificate associated - with a hosted service. - - service_name: Name of the hosted service. - thumbalgorithm: The algorithm for the certificate's thumbprint. - thumbprint: The hexadecimal representation of the thumbprint. - ''' - _validate_not_none('service_name', service_name) - _validate_not_none('thumbalgorithm', thumbalgorithm) - _validate_not_none('thumbprint', thumbprint) - return self._perform_get( - '/' + self.subscription_id + '/services/hostedservices/' + - _str(service_name) + '/certificates/' + - _str(thumbalgorithm) + '-' + _str(thumbprint) + '', - Certificate) - - def add_service_certificate(self, service_name, data, certificate_format, - password): - ''' - Adds a certificate to a hosted service. - - service_name: Name of the hosted service. - data: The base-64 encoded form of the pfx file. - certificate_format: - The service certificate format. The only supported value is pfx. - password: The certificate password. - ''' - _validate_not_none('service_name', service_name) - _validate_not_none('data', data) - _validate_not_none('certificate_format', certificate_format) - _validate_not_none('password', password) - return self._perform_post( - '/' + self.subscription_id + '/services/hostedservices/' + - _str(service_name) + '/certificates', - _XmlSerializer.certificate_file_to_xml( - data, certificate_format, password), - async=True) - - def delete_service_certificate(self, service_name, thumbalgorithm, - thumbprint): - ''' - Deletes a service certificate from the certificate store of a hosted - service. - - service_name: Name of the hosted service. - thumbalgorithm: The algorithm for the certificate's thumbprint. - thumbprint: The hexadecimal representation of the thumbprint. 
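add_service_certificate, defined just above, takes the base-64 form of a pfx file; a small sketch of preparing and uploading one, where the file path, service name and password are placeholders:

import base64

# Hypothetical: upload a service certificate from a local .pfx file.
with open('/path/to/cert.pfx', 'rb') as f:
    pfx_data = base64.b64encode(f.read())
result = sms.add_service_certificate('myservice', pfx_data,
                                     'pfx', 'cert-password')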
- ''' - _validate_not_none('service_name', service_name) - _validate_not_none('thumbalgorithm', thumbalgorithm) - _validate_not_none('thumbprint', thumbprint) - return self._perform_delete( - '/' + self.subscription_id + '/services/hostedservices/' + - _str(service_name) + '/certificates/' + - _str(thumbalgorithm) + '-' + _str(thumbprint), - async=True) - - #--Operations for management certificates ---------------------------- - def list_management_certificates(self): - ''' - The List Management Certificates operation lists and returns basic - information about all of the management certificates associated with - the specified subscription. Management certificates, which are also - known as subscription certificates, authenticate clients attempting to - connect to resources associated with your Windows Azure subscription. - ''' - return self._perform_get('/' + self.subscription_id + '/certificates', - SubscriptionCertificates) - - def get_management_certificate(self, thumbprint): - ''' - The Get Management Certificate operation retrieves information about - the management certificate with the specified thumbprint. Management - certificates, which are also known as subscription certificates, - authenticate clients attempting to connect to resources associated - with your Windows Azure subscription. - - thumbprint: The thumbprint value of the certificate. - ''' - _validate_not_none('thumbprint', thumbprint) - return self._perform_get( - '/' + self.subscription_id + '/certificates/' + _str(thumbprint), - SubscriptionCertificate) - - def add_management_certificate(self, public_key, thumbprint, data): - ''' - The Add Management Certificate operation adds a certificate to the - list of management certificates. Management certificates, which are - also known as subscription certificates, authenticate clients - attempting to connect to resources associated with your Windows Azure - subscription. - - public_key: - A base64 representation of the management certificate public key. - thumbprint: - The thumb print that uniquely identifies the management - certificate. - data: The certificate's raw data in base-64 encoded .cer format. - ''' - _validate_not_none('public_key', public_key) - _validate_not_none('thumbprint', thumbprint) - _validate_not_none('data', data) - return self._perform_post( - '/' + self.subscription_id + '/certificates', - _XmlSerializer.subscription_certificate_to_xml( - public_key, thumbprint, data)) - - def delete_management_certificate(self, thumbprint): - ''' - The Delete Management Certificate operation deletes a certificate from - the list of management certificates. Management certificates, which - are also known as subscription certificates, authenticate clients - attempting to connect to resources associated with your Windows Azure - subscription. - - thumbprint: - The thumb print that uniquely identifies the management - certificate. - ''' - _validate_not_none('thumbprint', thumbprint) - return self._perform_delete( - '/' + self.subscription_id + '/certificates/' + _str(thumbprint)) - - #--Operations for affinity groups ------------------------------------ - def list_affinity_groups(self): - ''' - Lists the affinity groups associated with the specified subscription. - ''' - return self._perform_get( - '/' + self.subscription_id + '/affinitygroups', - AffinityGroups) - - def get_affinity_group_properties(self, affinity_group_name): - ''' - Returns the system properties associated with the specified affinity - group. - - affinity_group_name: The name of the affinity group. 
- ''' - _validate_not_none('affinity_group_name', affinity_group_name) - return self._perform_get( - '/' + self.subscription_id + '/affinitygroups/' + - _str(affinity_group_name) + '', - AffinityGroup) - - def create_affinity_group(self, name, label, location, description=None): - ''' - Creates a new affinity group for the specified subscription. - - name: A name for the affinity group that is unique to the subscription. - label: - A name for the affinity group. The name can be up to 100 characters - in length. - location: - The data center location where the affinity group will be created. - To list available locations, use the list_location function. - description: - A description for the affinity group. The description can be up to - 1024 characters in length. - ''' - _validate_not_none('name', name) - _validate_not_none('label', label) - _validate_not_none('location', location) - return self._perform_post( - '/' + self.subscription_id + '/affinitygroups', - _XmlSerializer.create_affinity_group_to_xml(name, - label, - description, - location)) - - def update_affinity_group(self, affinity_group_name, label, - description=None): - ''' - Updates the label and/or the description for an affinity group for the - specified subscription. - - affinity_group_name: The name of the affinity group. - label: - A name for the affinity group. The name can be up to 100 characters - in length. - description: - A description for the affinity group. The description can be up to - 1024 characters in length. - ''' - _validate_not_none('affinity_group_name', affinity_group_name) - _validate_not_none('label', label) - return self._perform_put( - '/' + self.subscription_id + '/affinitygroups/' + - _str(affinity_group_name), - _XmlSerializer.update_affinity_group_to_xml(label, description)) - - def delete_affinity_group(self, affinity_group_name): - ''' - Deletes an affinity group in the specified subscription. - - affinity_group_name: The name of the affinity group. - ''' - _validate_not_none('affinity_group_name', affinity_group_name) - return self._perform_delete('/' + self.subscription_id + \ - '/affinitygroups/' + \ - _str(affinity_group_name)) - - #--Operations for locations ------------------------------------------ - def list_locations(self): - ''' - Lists all of the data center locations that are valid for your - subscription. - ''' - return self._perform_get('/' + self.subscription_id + '/locations', - Locations) - - #--Operations for tracking asynchronous requests --------------------- - def get_operation_status(self, request_id): - ''' - Returns the status of the specified operation. After calling an - asynchronous operation, you can call Get Operation Status to determine - whether the operation has succeeded, failed, or is still in progress. - - request_id: The request ID for the request you wish to track. - ''' - _validate_not_none('request_id', request_id) - return self._perform_get( - '/' + self.subscription_id + '/operations/' + _str(request_id), - Operation) - - #--Operations for retrieving operating system information ------------ - def list_operating_systems(self): - ''' - Lists the versions of the guest operating system that are currently - available in Windows Azure. - ''' - return self._perform_get( - '/' + self.subscription_id + '/operatingsystems', - OperatingSystems) - - def list_operating_system_families(self): - ''' - Lists the guest operating system families available in Windows Azure, - and also lists the operating system versions available for each family. 
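Asynchronous operations throughout this file (those issued with async=True) pair with get_operation_status, defined above: poll with the returned request id until the status leaves the in-progress state. A hedged sketch, assuming the async result exposes request_id as elsewhere in this SDK and that the status values are 'InProgress', 'Succeeded' and 'Failed':

import time

# Hypothetical polling loop; 'result' is the return value of an async
# call such as create_storage_account.
operation = sms.get_operation_status(result.request_id)
while operation.status == 'InProgress':
    time.sleep(5)
    operation = sms.get_operation_status(result.request_id)
print(operation.status)  # 'Succeeded' or 'Failed'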
- ''' - return self._perform_get( - '/' + self.subscription_id + '/operatingsystemfamilies', - OperatingSystemFamilies) - - #--Operations for retrieving subscription history -------------------- - def get_subscription(self): - ''' - Returns account and resource allocation information on the specified - subscription. - ''' - return self._perform_get('/' + self.subscription_id + '', - Subscription) - - #--Operations for virtual machines ----------------------------------- - def get_role(self, service_name, deployment_name, role_name): - ''' - Retrieves the specified virtual machine. - - service_name: The name of the service. - deployment_name: The name of the deployment. - role_name: The name of the role. - ''' - _validate_not_none('service_name', service_name) - _validate_not_none('deployment_name', deployment_name) - _validate_not_none('role_name', role_name) - return self._perform_get( - self._get_role_path(service_name, deployment_name, role_name), - PersistentVMRole) - - def create_virtual_machine_deployment(self, service_name, deployment_name, - deployment_slot, label, role_name, - system_config, os_virtual_hard_disk, - network_config=None, - availability_set_name=None, - data_virtual_hard_disks=None, - role_size=None, - role_type='PersistentVMRole', - virtual_network_name=None): - ''' - Provisions a virtual machine based on the supplied configuration. - - service_name: Name of the hosted service. - deployment_name: - The name for the deployment. The deployment name must be unique - among other deployments for the hosted service. - deployment_slot: - The environment to which the hosted service is deployed. Valid - values are: staging, production - label: - Specifies an identifier for the deployment. The label can be up to - 100 characters long. The label can be used for tracking purposes. - role_name: The name of the role. - system_config: - Contains the metadata required to provision a virtual machine from - a Windows or Linux OS image. Use an instance of - WindowsConfigurationSet or LinuxConfigurationSet. - os_virtual_hard_disk: - Contains the parameters Windows Azure uses to create the operating - system disk for the virtual machine. - network_config: - Encapsulates the metadata required to create the virtual network - configuration for a virtual machine. If you do not include a - network configuration set you will not be able to access the VM - through VIPs over the internet. If your virtual machine belongs to - a virtual network you can not specify which subnet address space - it resides under. - availability_set_name: - Specifies the name of an availability set to which to add the - virtual machine. This value controls the virtual machine - allocation in the Windows Azure environment. Virtual machines - specified in the same availability set are allocated to different - nodes to maximize availability. - data_virtual_hard_disks: - Contains the parameters Windows Azure uses to create a data disk - for a virtual machine. - role_size: - The size of the virtual machine to allocate. The default value is - Small. Possible values are: ExtraSmall, Small, Medium, Large, - ExtraLarge. The specified value must be compatible with the disk - selected in the OSVirtualHardDisk values. - role_type: - The type of the role for the virtual machine. The only supported - value is PersistentVMRole. - virtual_network_name: - Specifies the name of an existing virtual network to which the - deployment will belong. 
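The create_virtual_machine_deployment docstring above names WindowsConfigurationSet or LinuxConfigurationSet plus OSVirtualHardDisk as its inputs; a minimal Linux provisioning sketch, in which the constructor argument order for those classes is an assumption and every name and URL is a placeholder:

from azure.servicemanagement import LinuxConfigurationSet, OSVirtualHardDisk

# Hypothetical VM provisioning call; all names/URLs are placeholders.
system_config = LinuxConfigurationSet('myhostname', 'azureuser',
                                      'P@ssw0rd!', True)
os_hd = OSVirtualHardDisk(
    'imagename',                                            # source image
    'http://example.blob.core.windows.net/disks/myvm.vhd')  # media link

result = sms.create_virtual_machine_deployment(
    'myservice', 'mydeployment', 'production', 'mylabel',
    'myrole', system_config, os_hd, role_size='Small')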
- ''' - _validate_not_none('service_name', service_name) - _validate_not_none('deployment_name', deployment_name) - _validate_not_none('deployment_slot', deployment_slot) - _validate_not_none('label', label) - _validate_not_none('role_name', role_name) - _validate_not_none('system_config', system_config) - _validate_not_none('os_virtual_hard_disk', os_virtual_hard_disk) - return self._perform_post( - self._get_deployment_path_using_name(service_name), - _XmlSerializer.virtual_machine_deployment_to_xml( - deployment_name, - deployment_slot, - label, - role_name, - system_config, - os_virtual_hard_disk, - role_type, - network_config, - availability_set_name, - data_virtual_hard_disks, - role_size, - virtual_network_name), - async=True) - - def add_role(self, service_name, deployment_name, role_name, system_config, - os_virtual_hard_disk, network_config=None, - availability_set_name=None, data_virtual_hard_disks=None, - role_size=None, role_type='PersistentVMRole'): - ''' - Adds a virtual machine to an existing deployment. - - service_name: The name of the service. - deployment_name: The name of the deployment. - role_name: The name of the role. - system_config: - Contains the metadata required to provision a virtual machine from - a Windows or Linux OS image. Use an instance of - WindowsConfigurationSet or LinuxConfigurationSet. - os_virtual_hard_disk: - Contains the parameters Windows Azure uses to create the operating - system disk for the virtual machine. - network_config: - Encapsulates the metadata required to create the virtual network - configuration for a virtual machine. If you do not include a - network configuration set you will not be able to access the VM - through VIPs over the internet. If your virtual machine belongs to - a virtual network you can not specify which subnet address space - it resides under. - availability_set_name: - Specifies the name of an availability set to which to add the - virtual machine. This value controls the virtual machine allocation - in the Windows Azure environment. Virtual machines specified in the - same availability set are allocated to different nodes to maximize - availability. - data_virtual_hard_disks: - Contains the parameters Windows Azure uses to create a data disk - for a virtual machine. - role_size: - The size of the virtual machine to allocate. The default value is - Small. Possible values are: ExtraSmall, Small, Medium, Large, - ExtraLarge. The specified value must be compatible with the disk - selected in the OSVirtualHardDisk values. - role_type: - The type of the role for the virtual machine. The only supported - value is PersistentVMRole. - ''' - _validate_not_none('service_name', service_name) - _validate_not_none('deployment_name', deployment_name) - _validate_not_none('role_name', role_name) - _validate_not_none('system_config', system_config) - _validate_not_none('os_virtual_hard_disk', os_virtual_hard_disk) - return self._perform_post( - self._get_role_path(service_name, deployment_name), - _XmlSerializer.add_role_to_xml( - role_name, - system_config, - os_virtual_hard_disk, - role_type, - network_config, - availability_set_name, - data_virtual_hard_disks, - role_size), - async=True) - - def update_role(self, service_name, deployment_name, role_name, - os_virtual_hard_disk=None, network_config=None, - availability_set_name=None, data_virtual_hard_disks=None, - role_size=None, role_type='PersistentVMRole'): - ''' - Updates the specified virtual machine. - - service_name: The name of the service. 
- deployment_name: The name of the deployment. - role_name: The name of the role. - os_virtual_hard_disk: - Contains the parameters Windows Azure uses to create the operating - system disk for the virtual machine. - network_config: - Encapsulates the metadata required to create the virtual network - configuration for a virtual machine. If you do not include a - network configuration set you will not be able to access the VM - through VIPs over the internet. If your virtual machine belongs to - a virtual network you can not specify which subnet address space - it resides under. - availability_set_name: - Specifies the name of an availability set to which to add the - virtual machine. This value controls the virtual machine allocation - in the Windows Azure environment. Virtual machines specified in the - same availability set are allocated to different nodes to maximize - availability. - data_virtual_hard_disks: - Contains the parameters Windows Azure uses to create a data disk - for a virtual machine. - role_size: - The size of the virtual machine to allocate. The default value is - Small. Possible values are: ExtraSmall, Small, Medium, Large, - ExtraLarge. The specified value must be compatible with the disk - selected in the OSVirtualHardDisk values. - role_type: - The type of the role for the virtual machine. The only supported - value is PersistentVMRole. - ''' - _validate_not_none('service_name', service_name) - _validate_not_none('deployment_name', deployment_name) - _validate_not_none('role_name', role_name) - return self._perform_put( - self._get_role_path(service_name, deployment_name, role_name), - _XmlSerializer.update_role_to_xml( - role_name, - os_virtual_hard_disk, - role_type, - network_config, - availability_set_name, - data_virtual_hard_disks, - role_size), - async=True) - - def delete_role(self, service_name, deployment_name, role_name): - ''' - Deletes the specified virtual machine. - - service_name: The name of the service. - deployment_name: The name of the deployment. - role_name: The name of the role. - ''' - _validate_not_none('service_name', service_name) - _validate_not_none('deployment_name', deployment_name) - _validate_not_none('role_name', role_name) - return self._perform_delete( - self._get_role_path(service_name, deployment_name, role_name), - async=True) - - def capture_role(self, service_name, deployment_name, role_name, - post_capture_action, target_image_name, - target_image_label, provisioning_configuration=None): - ''' - The Capture Role operation captures a virtual machine image to your - image gallery. From the captured image, you can create additional - customized virtual machines. - - service_name: The name of the service. - deployment_name: The name of the deployment. - role_name: The name of the role. - post_capture_action: - Specifies the action after capture operation completes. Possible - values are: Delete, Reprovision. - target_image_name: - Specifies the image name of the captured virtual machine. - target_image_label: - Specifies the friendly name of the captured virtual machine. - provisioning_configuration: - Use an instance of WindowsConfigurationSet or LinuxConfigurationSet. 
- ''' - _validate_not_none('service_name', service_name) - _validate_not_none('deployment_name', deployment_name) - _validate_not_none('role_name', role_name) - _validate_not_none('post_capture_action', post_capture_action) - _validate_not_none('target_image_name', target_image_name) - _validate_not_none('target_image_label', target_image_label) - return self._perform_post( - self._get_role_instance_operations_path( - service_name, deployment_name, role_name), - _XmlSerializer.capture_role_to_xml( - post_capture_action, - target_image_name, - target_image_label, - provisioning_configuration), - async=True) - - def start_role(self, service_name, deployment_name, role_name): - ''' - Starts the specified virtual machine. - - service_name: The name of the service. - deployment_name: The name of the deployment. - role_name: The name of the role. - ''' - _validate_not_none('service_name', service_name) - _validate_not_none('deployment_name', deployment_name) - _validate_not_none('role_name', role_name) - return self._perform_post( - self._get_role_instance_operations_path( - service_name, deployment_name, role_name), - _XmlSerializer.start_role_operation_to_xml(), - async=True) - - def start_roles(self, service_name, deployment_name, role_names): - ''' - Starts the specified virtual machines. - - service_name: The name of the service. - deployment_name: The name of the deployment. - role_names: The names of the roles, as an enumerable of strings. - ''' - _validate_not_none('service_name', service_name) - _validate_not_none('deployment_name', deployment_name) - _validate_not_none('role_names', role_names) - return self._perform_post( - self._get_roles_operations_path(service_name, deployment_name), - _XmlSerializer.start_roles_operation_to_xml(role_names), - async=True) - - def restart_role(self, service_name, deployment_name, role_name): - ''' - Restarts the specified virtual machine. - - service_name: The name of the service. - deployment_name: The name of the deployment. - role_name: The name of the role. - ''' - _validate_not_none('service_name', service_name) - _validate_not_none('deployment_name', deployment_name) - _validate_not_none('role_name', role_name) - return self._perform_post( - self._get_role_instance_operations_path( - service_name, deployment_name, role_name), - _XmlSerializer.restart_role_operation_to_xml( - ), - async=True) - - def shutdown_role(self, service_name, deployment_name, role_name, - post_shutdown_action='Stopped'): - ''' - Shuts down the specified virtual machine. - - service_name: The name of the service. - deployment_name: The name of the deployment. - role_name: The name of the role. - post_shutdown_action: - Specifies how the Virtual Machine should be shut down. Values are: - Stopped - Shuts down the Virtual Machine but retains the compute - resources. You will continue to be billed for the resources - that the stopped machine uses. - StoppedDeallocated - Shuts down the Virtual Machine and releases the compute - resources. You are not billed for the compute resources that - this Virtual Machine uses. If a static Virtual Network IP - address is assigned to the Virtual Machine, it is reserved. 
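# A minimal sketch of the start operations documented above, assuming an
# authenticated ServiceManagementService instance `sms`; the service,
# deployment, and role names are placeholders. start_roles batches several
# instances into a single operation against the deployment.
sms.start_role('mysvc', 'mydeployment', 'vm1')
sms.start_roles('mysvc', 'mydeployment', ['vm1', 'vm2'])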
- ''' - _validate_not_none('service_name', service_name) - _validate_not_none('deployment_name', deployment_name) - _validate_not_none('role_name', role_name) - _validate_not_none('post_shutdown_action', post_shutdown_action) - return self._perform_post( - self._get_role_instance_operations_path( - service_name, deployment_name, role_name), - _XmlSerializer.shutdown_role_operation_to_xml(post_shutdown_action), - async=True) - - def shutdown_roles(self, service_name, deployment_name, role_names, - post_shutdown_action='Stopped'): - ''' - Shuts down the specified virtual machines. - - service_name: The name of the service. - deployment_name: The name of the deployment. - role_names: The names of the roles, as an enumerable of strings. - post_shutdown_action: - Specifies how the Virtual Machine should be shut down. Values are: - Stopped - Shuts down the Virtual Machine but retains the compute - resources. You will continue to be billed for the resources - that the stopped machine uses. - StoppedDeallocated - Shuts down the Virtual Machine and releases the compute - resources. You are not billed for the compute resources that - this Virtual Machine uses. If a static Virtual Network IP - address is assigned to the Virtual Machine, it is reserved. - ''' - _validate_not_none('service_name', service_name) - _validate_not_none('deployment_name', deployment_name) - _validate_not_none('role_names', role_names) - _validate_not_none('post_shutdown_action', post_shutdown_action) - return self._perform_post( - self._get_roles_operations_path(service_name, deployment_name), - _XmlSerializer.shutdown_roles_operation_to_xml( - role_names, post_shutdown_action), - async=True) - - #--Operations for virtual machine images ----------------------------- - def list_os_images(self): - ''' - Retrieves a list of the OS images from the image repository. - ''' - return self._perform_get(self._get_image_path(), - Images) - - def get_os_image(self, image_name): - ''' - Retrieves an OS image from the image repository. - ''' - return self._perform_get(self._get_image_path(image_name), - OSImage) - - def add_os_image(self, label, media_link, name, os): - ''' - Adds an OS image that is currently stored in a storage account in your - subscription to the image repository. - - label: Specifies the friendly name of the image. - media_link: - Specifies the location of the blob in Windows Azure blob store - where the media for the image is located. The blob location must - belong to a storage account in the subscription specified by the - <subscription-id> value in the operation call. Example: - http://example.blob.core.windows.net/disks/mydisk.vhd - name: - Specifies a name for the OS image that Windows Azure uses to - identify the image when creating one or more virtual machines. - os: - The operating system type of the OS image. Possible values are: - Linux, Windows - ''' - _validate_not_none('label', label) - _validate_not_none('media_link', media_link) - _validate_not_none('name', name) - _validate_not_none('os', os) - return self._perform_post(self._get_image_path(), - _XmlSerializer.os_image_to_xml( - label, media_link, name, os), - async=True) - - def update_os_image(self, image_name, label, media_link, name, os): - ''' - Updates an OS image that in your image repository. - - image_name: The name of the image to update. - label: - Specifies the friendly name of the image to be updated. You cannot - use this operation to update images provided by the Windows Azure - platform. 
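# A minimal sketch of shutdown_roles under the same assumptions as above:
# per the docstring, 'StoppedDeallocated' releases the compute resources so
# the stopped VMs stop accruing charges, while the default 'Stopped' keeps
# (and bills for) them.
sms.shutdown_roles('mysvc', 'mydeployment', ['vm1', 'vm2'],
                   post_shutdown_action='StoppedDeallocated')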
-        media_link:
-            Specifies the location of the blob in Windows Azure blob store
-            where the media for the image is located. The blob location must
-            belong to a storage account in the subscription specified by the
-            <subscription-id> value in the operation call. Example:
-            http://example.blob.core.windows.net/disks/mydisk.vhd
-        name:
-            Specifies a name for the OS image that Windows Azure uses to
-            identify the image when creating one or more VM Roles.
-        os:
-            The operating system type of the OS image. Possible values are:
-            Linux, Windows
-        '''
-        _validate_not_none('image_name', image_name)
-        _validate_not_none('label', label)
-        _validate_not_none('media_link', media_link)
-        _validate_not_none('name', name)
-        _validate_not_none('os', os)
-        return self._perform_put(self._get_image_path(image_name),
-                                 _XmlSerializer.os_image_to_xml(
-                                     label, media_link, name, os),
-                                 async=True)
-
-    def delete_os_image(self, image_name, delete_vhd=False):
-        '''
-        Deletes the specified OS image from your image repository.
-
-        image_name: The name of the image.
-        delete_vhd: Deletes the underlying vhd blob in Azure storage.
-        '''
-        _validate_not_none('image_name', image_name)
-        path = self._get_image_path(image_name)
-        if delete_vhd:
-            path += '?comp=media'
-        return self._perform_delete(path, async=True)
-
-    #--Operations for virtual machine disks ------------------------------
-    def get_data_disk(self, service_name, deployment_name, role_name, lun):
-        '''
-        Retrieves the specified data disk from a virtual machine.
-
-        service_name: The name of the service.
-        deployment_name: The name of the deployment.
-        role_name: The name of the role.
-        lun: The Logical Unit Number (LUN) for the disk.
-        '''
-        _validate_not_none('service_name', service_name)
-        _validate_not_none('deployment_name', deployment_name)
-        _validate_not_none('role_name', role_name)
-        _validate_not_none('lun', lun)
-        return self._perform_get(
-            self._get_data_disk_path(
-                service_name, deployment_name, role_name, lun),
-            DataVirtualHardDisk)
-
-    def add_data_disk(self, service_name, deployment_name, role_name, lun,
-                      host_caching=None, media_link=None, disk_label=None,
-                      disk_name=None, logical_disk_size_in_gb=None,
-                      source_media_link=None):
-        '''
-        Adds a data disk to a virtual machine.
-
-        service_name: The name of the service.
-        deployment_name: The name of the deployment.
-        role_name: The name of the role.
-        lun:
-            Specifies the Logical Unit Number (LUN) for the disk. The LUN
-            specifies the slot in which the data drive appears when mounted
-            for usage by the virtual machine. Valid LUN values are 0 through 15.
-        host_caching:
-            Specifies the platform caching behavior of data disk blob for
-            read/write efficiency. The default value is ReadOnly. Possible
-            values are: None, ReadOnly, ReadWrite
-        media_link:
-            Specifies the location of the blob in Windows Azure blob store
-            where the media for the disk is located. The blob location must
-            belong to the storage account in the subscription specified by the
-            <subscription-id> value in the operation call. Example:
-            http://example.blob.core.windows.net/disks/mydisk.vhd
-        disk_label:
-            Specifies the description of the data disk. When you attach a disk,
-            either by directly referencing a media using the MediaLink element
-            or specifying the target disk size, you can use the DiskLabel
-            element to customize the name property of the target data disk.
-        disk_name:
-            Specifies the name of the disk. Windows Azure uses the specified
-            disk to create the data disk for the machine and populates this
-            field with the disk name.
-        logical_disk_size_in_gb:
-            Specifies the size, in GB, of an empty disk to be attached to the
-            role. The disk can be created as part of disk attach or create VM
-            role call by specifying the value for this property. Windows Azure
-            creates the empty disk based on size preference and attaches the
-            newly created disk to the Role.
-        source_media_link:
-            Specifies the location of a blob in account storage which is
-            mounted as a data disk when the virtual machine is created.
-        '''
-        _validate_not_none('service_name', service_name)
-        _validate_not_none('deployment_name', deployment_name)
-        _validate_not_none('role_name', role_name)
-        _validate_not_none('lun', lun)
-        return self._perform_post(
-            self._get_data_disk_path(service_name, deployment_name, role_name),
-            _XmlSerializer.data_virtual_hard_disk_to_xml(
-                host_caching,
-                disk_label,
-                disk_name,
-                lun,
-                logical_disk_size_in_gb,
-                media_link,
-                source_media_link),
-            async=True)
-
-    def update_data_disk(self, service_name, deployment_name, role_name, lun,
-                         host_caching=None, media_link=None, updated_lun=None,
-                         disk_label=None, disk_name=None,
-                         logical_disk_size_in_gb=None):
-        '''
-        Updates the specified data disk attached to the specified virtual
-        machine.
-
-        service_name: The name of the service.
-        deployment_name: The name of the deployment.
-        role_name: The name of the role.
-        lun:
-            Specifies the Logical Unit Number (LUN) for the disk. The LUN
-            specifies the slot in which the data drive appears when mounted
-            for usage by the virtual machine. Valid LUN values are 0 through
-            15.
-        host_caching:
-            Specifies the platform caching behavior of data disk blob for
-            read/write efficiency. The default value is ReadOnly. Possible
-            values are: None, ReadOnly, ReadWrite
-        media_link:
-            Specifies the location of the blob in Windows Azure blob store
-            where the media for the disk is located. The blob location must
-            belong to the storage account in the subscription specified by
-            the <subscription-id> value in the operation call. Example:
-            http://example.blob.core.windows.net/disks/mydisk.vhd
-        updated_lun:
-            Specifies the Logical Unit Number (LUN) for the disk. The LUN
-            specifies the slot in which the data drive appears when mounted
-            for usage by the virtual machine. Valid LUN values are 0 through 15.
-        disk_label:
-            Specifies the description of the data disk. When you attach a disk,
-            either by directly referencing a media using the MediaLink element
-            or specifying the target disk size, you can use the DiskLabel
-            element to customize the name property of the target data disk.
-        disk_name:
-            Specifies the name of the disk. Windows Azure uses the specified
-            disk to create the data disk for the machine and populates this
-            field with the disk name.
-        logical_disk_size_in_gb:
-            Specifies the size, in GB, of an empty disk to be attached to the
-            role. The disk can be created as part of disk attach or create VM
-            role call by specifying the value for this property. Windows Azure
-            creates the empty disk based on size preference and attaches the
-            newly created disk to the Role.
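# A minimal sketch of the data-disk calls documented above, assuming an
# authenticated ServiceManagementService instance `sms` and placeholder names.
# Attach an empty 10 GB disk at LUN 1 (host_caching defaults to ReadOnly),
# then move it to LUN 2 with read/write caching.
sms.add_data_disk('mysvc', 'mydeployment', 'vm1', 1,
                  logical_disk_size_in_gb=10, disk_label='scratch')
sms.update_data_disk('mysvc', 'mydeployment', 'vm1', 1,
                     updated_lun=2, host_caching='ReadWrite')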
-        '''
-        _validate_not_none('service_name', service_name)
-        _validate_not_none('deployment_name', deployment_name)
-        _validate_not_none('role_name', role_name)
-        _validate_not_none('lun', lun)
-        return self._perform_put(
-            self._get_data_disk_path(
-                service_name, deployment_name, role_name, lun),
-            _XmlSerializer.data_virtual_hard_disk_to_xml(
-                host_caching,
-                disk_label,
-                disk_name,
-                updated_lun,
-                logical_disk_size_in_gb,
-                media_link,
-                None),
-            async=True)
-
-    def delete_data_disk(self, service_name, deployment_name, role_name, lun, delete_vhd=False):
-        '''
-        Removes the specified data disk from a virtual machine.
-
-        service_name: The name of the service.
-        deployment_name: The name of the deployment.
-        role_name: The name of the role.
-        lun: The Logical Unit Number (LUN) for the disk.
-        delete_vhd: Deletes the underlying vhd blob in Azure storage.
-        '''
-        _validate_not_none('service_name', service_name)
-        _validate_not_none('deployment_name', deployment_name)
-        _validate_not_none('role_name', role_name)
-        _validate_not_none('lun', lun)
-        path = self._get_data_disk_path(service_name, deployment_name, role_name, lun)
-        if delete_vhd:
-            path += '?comp=media'
-        return self._perform_delete(path, async=True)
-
-    #--Operations for virtual machine disks ------------------------------
-    def list_disks(self):
-        '''
-        Retrieves a list of the disks in your image repository.
-        '''
-        return self._perform_get(self._get_disk_path(),
-                                 Disks)
-
-    def get_disk(self, disk_name):
-        '''
-        Retrieves a disk from your image repository.
-        '''
-        return self._perform_get(self._get_disk_path(disk_name),
-                                 Disk)
-
-    def add_disk(self, has_operating_system, label, media_link, name, os):
-        '''
-        Adds a disk to the user image repository. The disk can be an OS disk
-        or a data disk.
-
-        has_operating_system:
-            Specifies whether the disk contains an operating system. Only a
-            disk with an operating system installed can be mounted as OS Drive.
-        label: Specifies the description of the disk.
-        media_link:
-            Specifies the location of the blob in Windows Azure blob store
-            where the media for the disk is located. The blob location must
-            belong to the storage account in the current subscription specified
-            by the <subscription-id> value in the operation call. Example:
-            http://example.blob.core.windows.net/disks/mydisk.vhd
-        name:
-            Specifies a name for the disk. Windows Azure uses the name to
-            identify the disk when creating virtual machines from the disk.
-        os: The OS type of the disk. Possible values are: Linux, Windows
-        '''
-        _validate_not_none('has_operating_system', has_operating_system)
-        _validate_not_none('label', label)
-        _validate_not_none('media_link', media_link)
-        _validate_not_none('name', name)
-        _validate_not_none('os', os)
-        return self._perform_post(self._get_disk_path(),
-                                  _XmlSerializer.disk_to_xml(
-                                      has_operating_system,
-                                      label,
-                                      media_link,
-                                      name,
-                                      os))
-
-    def update_disk(self, disk_name, has_operating_system, label, media_link,
-                    name, os):
-        '''
-        Updates an existing disk in your image repository.
-
-        disk_name: The name of the disk to update.
-        has_operating_system:
-            Specifies whether the disk contains an operating system. Only a
-            disk with an operating system installed can be mounted as OS Drive.
-        label: Specifies the description of the disk.
-        media_link:
-            Specifies the location of the blob in Windows Azure blob store
-            where the media for the disk is located.
The blob location must - belong to the storage account in the current subscription specified - by the <subscription-id> value in the operation call. Example: - http://example.blob.core.windows.net/disks/mydisk.vhd - name: - Specifies a name for the disk. Windows Azure uses the name to - identify the disk when creating virtual machines from the disk. - os: The OS type of the disk. Possible values are: Linux, Windows - ''' - _validate_not_none('disk_name', disk_name) - _validate_not_none('has_operating_system', has_operating_system) - _validate_not_none('label', label) - _validate_not_none('media_link', media_link) - _validate_not_none('name', name) - _validate_not_none('os', os) - return self._perform_put(self._get_disk_path(disk_name), - _XmlSerializer.disk_to_xml( - has_operating_system, - label, - media_link, - name, - os)) - - def delete_disk(self, disk_name, delete_vhd=False): - ''' - Deletes the specified data or operating system disk from your image - repository. - - disk_name: The name of the disk to delete. - delete_vhd: Deletes the underlying vhd blob in Azure storage. - ''' - _validate_not_none('disk_name', disk_name) - path = self._get_disk_path(disk_name) - if delete_vhd: - path += '?comp=media' - return self._perform_delete(path) - - #--Operations for virtual networks ------------------------------ - def list_virtual_network_sites(self): - ''' - Retrieves a list of the virtual networks. - ''' - return self._perform_get(self._get_virtual_network_site_path(), VirtualNetworkSites) - - #--Helper functions -------------------------------------------------- - def _get_virtual_network_site_path(self): - return self._get_path('services/networking/virtualnetwork', None) - - def _get_storage_service_path(self, service_name=None): - return self._get_path('services/storageservices', service_name) - - def _get_hosted_service_path(self, service_name=None): - return self._get_path('services/hostedservices', service_name) - - def _get_deployment_path_using_slot(self, service_name, slot=None): - return self._get_path('services/hostedservices/' + _str(service_name) + - '/deploymentslots', slot) - - def _get_deployment_path_using_name(self, service_name, - deployment_name=None): - return self._get_path('services/hostedservices/' + _str(service_name) + - '/deployments', deployment_name) - - def _get_role_path(self, service_name, deployment_name, role_name=None): - return self._get_path('services/hostedservices/' + _str(service_name) + - '/deployments/' + deployment_name + - '/roles', role_name) - - def _get_role_instance_operations_path(self, service_name, deployment_name, - role_name=None): - return self._get_path('services/hostedservices/' + _str(service_name) + - '/deployments/' + deployment_name + - '/roleinstances', role_name) + '/Operations' - - def _get_roles_operations_path(self, service_name, deployment_name): - return self._get_path('services/hostedservices/' + _str(service_name) + - '/deployments/' + deployment_name + - '/roles/Operations', None) - - def _get_data_disk_path(self, service_name, deployment_name, role_name, - lun=None): - return self._get_path('services/hostedservices/' + _str(service_name) + - '/deployments/' + _str(deployment_name) + - '/roles/' + _str(role_name) + '/DataDisks', lun) - - def _get_disk_path(self, disk_name=None): - return self._get_path('services/disks', disk_name) - - def _get_image_path(self, image_name=None): - return self._get_path('services/images', image_name) +#------------------------------------------------------------------------- +# Copyright 
(c) Microsoft. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#-------------------------------------------------------------------------- +from azure import ( + WindowsAzureError, + MANAGEMENT_HOST, + _str, + _validate_not_none, + ) +from azure.servicemanagement import ( + AffinityGroups, + AffinityGroup, + AvailabilityResponse, + Certificate, + Certificates, + DataVirtualHardDisk, + Deployment, + Disk, + Disks, + Locations, + Operation, + HostedService, + HostedServices, + Images, + OperatingSystems, + OperatingSystemFamilies, + OSImage, + PersistentVMRole, + ResourceExtensions, + ReservedIP, + ReservedIPs, + RoleSize, + RoleSizes, + StorageService, + StorageServices, + Subscription, + Subscriptions, + SubscriptionCertificate, + SubscriptionCertificates, + VirtualNetworkSites, + VMImages, + _XmlSerializer, + ) +from azure.servicemanagement.servicemanagementclient import ( + _ServiceManagementClient, + ) + +class ServiceManagementService(_ServiceManagementClient): + + def __init__(self, subscription_id=None, cert_file=None, + host=MANAGEMENT_HOST, request_session=None): + ''' + Initializes the management service. + + subscription_id: Subscription to manage. + cert_file: + Path to .pem certificate file (httplib), or location of the + certificate in your Personal certificate store (winhttp) in the + CURRENT_USER\my\CertificateName format. + If a request_session is specified, then this is unused. + host: Live ServiceClient URL. Defaults to Azure public cloud. + request_session: + Session object to use for http requests. If this is specified, it + replaces the default use of httplib or winhttp. Also, the cert_file + parameter is unused when a session is passed in. + The session object handles authentication, and as such can support + multiple types of authentication: .pem certificate, oauth. + For example, you can pass in a Session instance from the requests + library. To use .pem certificate authentication with requests + library, set the path to the .pem file on the session.cert + attribute. + ''' + super(ServiceManagementService, self).__init__( + subscription_id, cert_file, host, request_session) + + #--Operations for subscriptions -------------------------------------- + def list_role_sizes(self): + ''' + Lists the role sizes that are available under the specified + subscription. + ''' + return self._perform_get(self._get_role_sizes_path(), + RoleSizes) + + def list_subscriptions(self): + ''' + Returns a list of subscriptions that you can access. + + You must make sure that the request that is made to the management + service is secure using an Active Directory access token. + ''' + return self._perform_get(self._get_subscriptions_path(), + Subscriptions) + + #--Operations for storage accounts ----------------------------------- + def list_storage_accounts(self): + ''' + Lists the storage accounts available under the current subscription. 
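# A minimal sketch of the two documented ways to authenticate: a .pem
# certificate file, or a requests Session carrying the certificate on its
# cert attribute (per the __init__ docstring above). The subscription id and
# file paths are placeholders.
import requests
from azure.servicemanagement import ServiceManagementService

sms = ServiceManagementService('<subscription-id>', cert_file='mycert.pem')

session = requests.Session()
session.cert = 'mycert.pem'
sms = ServiceManagementService('<subscription-id>', request_session=session)
role_sizes = sms.list_role_sizes()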
+ ''' + return self._perform_get(self._get_storage_service_path(), + StorageServices) + + def get_storage_account_properties(self, service_name): + ''' + Returns system properties for the specified storage account. + + service_name: Name of the storage service account. + ''' + _validate_not_none('service_name', service_name) + return self._perform_get(self._get_storage_service_path(service_name), + StorageService) + + def get_storage_account_keys(self, service_name): + ''' + Returns the primary and secondary access keys for the specified + storage account. + + service_name: Name of the storage service account. + ''' + _validate_not_none('service_name', service_name) + return self._perform_get( + self._get_storage_service_path(service_name) + '/keys', + StorageService) + + def regenerate_storage_account_keys(self, service_name, key_type): + ''' + Regenerates the primary or secondary access key for the specified + storage account. + + service_name: Name of the storage service account. + key_type: + Specifies which key to regenerate. Valid values are: + Primary, Secondary + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('key_type', key_type) + return self._perform_post( + self._get_storage_service_path( + service_name) + '/keys?action=regenerate', + _XmlSerializer.regenerate_keys_to_xml( + key_type), + StorageService) + + def create_storage_account(self, service_name, description, label, + affinity_group=None, location=None, + geo_replication_enabled=None, + extended_properties=None, + account_type='Standard_GRS'): + ''' + Creates a new storage account in Windows Azure. + + service_name: + A name for the storage account that is unique within Windows Azure. + Storage account names must be between 3 and 24 characters in length + and use numbers and lower-case letters only. + description: + A description for the storage account. The description may be up + to 1024 characters in length. + label: + A name for the storage account. The name may be up to 100 + characters in length. The name can be used to identify the storage + account for your tracking purposes. + affinity_group: + The name of an existing affinity group in the specified + subscription. You can specify either a location or affinity_group, + but not both. + location: + The location where the storage account is created. You can specify + either a location or affinity_group, but not both. + geo_replication_enabled: + Deprecated. Replaced by the account_type parameter. + extended_properties: + Dictionary containing name/value pairs of storage account + properties. You can have a maximum of 50 extended property + name/value pairs. The maximum length of the Name element is 64 + characters, only alphanumeric characters and underscores are valid + in the Name, and the name must start with a letter. The value has + a maximum length of 255 characters. + account_type: + Specifies whether the account supports locally-redundant storage, + geo-redundant storage, zone-redundant storage, or read access + geo-redundant storage. 
+ Possible values are: + Standard_LRS, Standard_ZRS, Standard_GRS, Standard_RAGRS + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('description', description) + _validate_not_none('label', label) + if affinity_group is None and location is None: + raise WindowsAzureError( + 'location or affinity_group must be specified') + if affinity_group is not None and location is not None: + raise WindowsAzureError( + 'Only one of location or affinity_group needs to be specified') + if geo_replication_enabled == False: + account_type = 'Standard_LRS' + return self._perform_post( + self._get_storage_service_path(), + _XmlSerializer.create_storage_service_input_to_xml( + service_name, + description, + label, + affinity_group, + location, + account_type, + extended_properties), + async=True) + + def update_storage_account(self, service_name, description=None, + label=None, geo_replication_enabled=None, + extended_properties=None, + account_type='Standard_GRS'): + ''' + Updates the label, the description, and enables or disables the + geo-replication status for a storage account in Windows Azure. + + service_name: Name of the storage service account. + description: + A description for the storage account. The description may be up + to 1024 characters in length. + label: + A name for the storage account. The name may be up to 100 + characters in length. The name can be used to identify the storage + account for your tracking purposes. + geo_replication_enabled: + Deprecated. Replaced by the account_type parameter. + extended_properties: + Dictionary containing name/value pairs of storage account + properties. You can have a maximum of 50 extended property + name/value pairs. The maximum length of the Name element is 64 + characters, only alphanumeric characters and underscores are valid + in the Name, and the name must start with a letter. The value has + a maximum length of 255 characters. + account_type: + Specifies whether the account supports locally-redundant storage, + geo-redundant storage, zone-redundant storage, or read access + geo-redundant storage. + Possible values are: + Standard_LRS, Standard_ZRS, Standard_GRS, Standard_RAGRS + ''' + _validate_not_none('service_name', service_name) + if geo_replication_enabled == False: + account_type = 'Standard_LRS' + return self._perform_put( + self._get_storage_service_path(service_name), + _XmlSerializer.update_storage_service_input_to_xml( + description, + label, + account_type, + extended_properties)) + + def delete_storage_account(self, service_name): + ''' + Deletes the specified storage account from Windows Azure. + + service_name: Name of the storage service account. + ''' + _validate_not_none('service_name', service_name) + return self._perform_delete( + self._get_storage_service_path(service_name)) + + def check_storage_account_name_availability(self, service_name): + ''' + Checks to see if the specified storage account name is available, or + if it has already been taken. + + service_name: Name of the storage service account. + ''' + _validate_not_none('service_name', service_name) + return self._perform_get( + self._get_storage_service_path() + + '/operations/isavailable/' + + _str(service_name) + '', + AvailabilityResponse) + + #--Operations for hosted services ------------------------------------ + def list_hosted_services(self): + ''' + Lists the hosted services available under the current subscription. 
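# A minimal sketch of create_storage_account (documented above) using `sms`
# from the earlier sketch: exactly one of location/affinity_group may be
# given, otherwise the WindowsAzureError checks in the body are triggered;
# account_type supersedes the deprecated geo_replication_enabled flag. The
# account name and location are placeholders.
result = sms.create_storage_account('mystorage01', 'scratch account',
                                    'mystorage01', location='West US',
                                    account_type='Standard_LRS')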
+        '''
+        return self._perform_get(self._get_hosted_service_path(),
+                                 HostedServices)
+
+    def get_hosted_service_properties(self, service_name, embed_detail=False):
+        '''
+        Retrieves system properties for the specified hosted service. These
+        properties include the service name and service type; the name of the
+        affinity group to which the service belongs, or its location if it is
+        not part of an affinity group; and optionally, information on the
+        service's deployments.
+
+        service_name: Name of the hosted service.
+        embed_detail:
+            When True, the management service returns properties for all
+            deployments of the service, as well as for the service itself.
+        '''
+        _validate_not_none('service_name', service_name)
+        _validate_not_none('embed_detail', embed_detail)
+        return self._perform_get(
+            self._get_hosted_service_path(service_name) +
+            '?embed-detail=' +
+            _str(embed_detail).lower(),
+            HostedService)
+
+    def create_hosted_service(self, service_name, label, description=None,
+                              location=None, affinity_group=None,
+                              extended_properties=None):
+        '''
+        Creates a new hosted service in Windows Azure.
+
+        service_name:
+            A name for the hosted service that is unique within Windows Azure.
+            This name is the DNS prefix name and can be used to access the
+            hosted service.
+        label:
+            A name for the hosted service. The name can be up to 100 characters
+            in length. The name can be used to identify the hosted service for
+            your tracking purposes.
+        description:
+            A description for the hosted service. The description can be up to
+            1024 characters in length.
+        location:
+            The location where the hosted service will be created. You can
+            specify either a location or affinity_group, but not both.
+        affinity_group:
+            The name of an existing affinity group associated with this
+            subscription. This name is a GUID and can be retrieved by examining
+            the name element of the response body returned by
+            list_affinity_groups. You can specify either a location or
+            affinity_group, but not both.
+        extended_properties:
+            Dictionary containing name/value pairs of hosted service
+            properties. You can have a maximum of 50 extended property
+            name/value pairs. The maximum length of the Name element is 64
+            characters, only alphanumeric characters and underscores are valid
+            in the Name, and the name must start with a letter. The value has
+            a maximum length of 255 characters.
+        '''
+        _validate_not_none('service_name', service_name)
+        _validate_not_none('label', label)
+        if affinity_group is None and location is None:
+            raise WindowsAzureError(
+                'location or affinity_group must be specified')
+        if affinity_group is not None and location is not None:
+            raise WindowsAzureError(
+                'Only one of location or affinity_group needs to be specified')
+        return self._perform_post(self._get_hosted_service_path(),
+                                  _XmlSerializer.create_hosted_service_to_xml(
+                                      service_name,
+                                      label,
+                                      description,
+                                      location,
+                                      affinity_group,
+                                      extended_properties))
+
+    def update_hosted_service(self, service_name, label=None, description=None,
+                              extended_properties=None):
+        '''
+        Updates the label and/or the description for a hosted service in
+        Windows Azure.
+
+        service_name: Name of the hosted service.
+        label:
+            A name for the hosted service. The name may be up to 100 characters
+            in length. You must specify a value for either Label or
+            Description, or for both. It is recommended that the label be
+            unique within the subscription. The name can be used to
+            identify the hosted service for your tracking purposes.
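# A minimal sketch of the hosted-service calls above, using `sms` from the
# earlier sketch; the service name, label, and location are placeholders.
sms.create_hosted_service('mysvc', label='mysvc', location='West US')
props = sms.get_hosted_service_properties('mysvc', embed_detail=True)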
+ description: + A description for the hosted service. The description may be up to + 1024 characters in length. You must specify a value for either + Label or Description, or for both. + extended_properties: + Dictionary containing name/value pairs of storage account + properties. You can have a maximum of 50 extended property + name/value pairs. The maximum length of the Name element is 64 + characters, only alphanumeric characters and underscores are valid + in the Name, and the name must start with a letter. The value has + a maximum length of 255 characters. + ''' + _validate_not_none('service_name', service_name) + return self._perform_put(self._get_hosted_service_path(service_name), + _XmlSerializer.update_hosted_service_to_xml( + label, + description, + extended_properties)) + + def delete_hosted_service(self, service_name): + ''' + Deletes the specified hosted service from Windows Azure. + + service_name: Name of the hosted service. + ''' + _validate_not_none('service_name', service_name) + return self._perform_delete(self._get_hosted_service_path(service_name)) + + def get_deployment_by_slot(self, service_name, deployment_slot): + ''' + Returns configuration information, status, and system properties for + a deployment. + + service_name: Name of the hosted service. + deployment_slot: + The environment to which the hosted service is deployed. Valid + values are: staging, production + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('deployment_slot', deployment_slot) + return self._perform_get( + self._get_deployment_path_using_slot( + service_name, deployment_slot), + Deployment) + + def get_deployment_by_name(self, service_name, deployment_name): + ''' + Returns configuration information, status, and system properties for a + deployment. + + service_name: Name of the hosted service. + deployment_name: The name of the deployment. + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('deployment_name', deployment_name) + return self._perform_get( + self._get_deployment_path_using_name( + service_name, deployment_name), + Deployment) + + def create_deployment(self, service_name, deployment_slot, name, + package_url, label, configuration, + start_deployment=False, + treat_warnings_as_error=False, + extended_properties=None): + ''' + Uploads a new service package and creates a new deployment on staging + or production. + + service_name: Name of the hosted service. + deployment_slot: + The environment to which the hosted service is deployed. Valid + values are: staging, production + name: + The name for the deployment. The deployment name must be unique + among other deployments for the hosted service. + package_url: + A URL that refers to the location of the service package in the + Blob service. The service package can be located either in a + storage account beneath the same subscription or a Shared Access + Signature (SAS) URI from any storage account. + label: + A name for the hosted service. The name can be up to 100 characters + in length. It is recommended that the label be unique within the + subscription. The name can be used to identify the hosted service + for your tracking purposes. + configuration: + The base-64 encoded service configuration file for the deployment. + start_deployment: + Indicates whether to start the deployment immediately after it is + created. If false, the service model is still deployed to the + virtual machines but the code is not run immediately. 
Instead, the + service is Suspended until you call Update Deployment Status and + set the status to Running, at which time the service will be + started. A deployed service still incurs charges, even if it is + suspended. + treat_warnings_as_error: + Indicates whether to treat package validation warnings as errors. + If set to true, the Created Deployment operation fails if there + are validation warnings on the service package. + extended_properties: + Dictionary containing name/value pairs of storage account + properties. You can have a maximum of 50 extended property + name/value pairs. The maximum length of the Name element is 64 + characters, only alphanumeric characters and underscores are valid + in the Name, and the name must start with a letter. The value has + a maximum length of 255 characters. + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('deployment_slot', deployment_slot) + _validate_not_none('name', name) + _validate_not_none('package_url', package_url) + _validate_not_none('label', label) + _validate_not_none('configuration', configuration) + return self._perform_post( + self._get_deployment_path_using_slot( + service_name, deployment_slot), + _XmlSerializer.create_deployment_to_xml( + name, + package_url, + label, + configuration, + start_deployment, + treat_warnings_as_error, + extended_properties), + async=True) + + def delete_deployment(self, service_name, deployment_name): + ''' + Deletes the specified deployment. + + service_name: Name of the hosted service. + deployment_name: The name of the deployment. + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('deployment_name', deployment_name) + return self._perform_delete( + self._get_deployment_path_using_name( + service_name, deployment_name), + async=True) + + def swap_deployment(self, service_name, production, source_deployment): + ''' + Initiates a virtual IP swap between the staging and production + deployment environments for a service. If the service is currently + running in the staging environment, it will be swapped to the + production environment. If it is running in the production + environment, it will be swapped to staging. + + service_name: Name of the hosted service. + production: The name of the production deployment. + source_deployment: The name of the source deployment. + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('production', production) + _validate_not_none('source_deployment', source_deployment) + return self._perform_post(self._get_hosted_service_path(service_name), + _XmlSerializer.swap_deployment_to_xml( + production, source_deployment), + async=True) + + def change_deployment_configuration(self, service_name, deployment_name, + configuration, + treat_warnings_as_error=False, + mode='Auto', extended_properties=None): + ''' + Initiates a change to the deployment configuration. + + service_name: Name of the hosted service. + deployment_name: The name of the deployment. + configuration: + The base-64 encoded service configuration file for the deployment. + treat_warnings_as_error: + Indicates whether to treat package validation warnings as errors. + If set to true, the Created Deployment operation fails if there + are validation warnings on the service package. + mode: + If set to Manual, WalkUpgradeDomain must be called to apply the + update. If set to Auto, the Windows Azure platform will + automatically apply the update To each upgrade domain for the + service. 
Possible values are: Auto, Manual + extended_properties: + Dictionary containing name/value pairs of storage account + properties. You can have a maximum of 50 extended property + name/value pairs. The maximum length of the Name element is 64 + characters, only alphanumeric characters and underscores are valid + in the Name, and the name must start with a letter. The value has + a maximum length of 255 characters. + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('deployment_name', deployment_name) + _validate_not_none('configuration', configuration) + return self._perform_post( + self._get_deployment_path_using_name( + service_name, deployment_name) + '/?comp=config', + _XmlSerializer.change_deployment_to_xml( + configuration, + treat_warnings_as_error, + mode, + extended_properties), + async=True) + + def update_deployment_status(self, service_name, deployment_name, status): + ''' + Initiates a change in deployment status. + + service_name: Name of the hosted service. + deployment_name: The name of the deployment. + status: + The change to initiate to the deployment status. Possible values + include: Running, Suspended + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('deployment_name', deployment_name) + _validate_not_none('status', status) + return self._perform_post( + self._get_deployment_path_using_name( + service_name, deployment_name) + '/?comp=status', + _XmlSerializer.update_deployment_status_to_xml( + status), + async=True) + + def upgrade_deployment(self, service_name, deployment_name, mode, + package_url, configuration, label, force, + role_to_upgrade=None, extended_properties=None): + ''' + Initiates an upgrade. + + service_name: Name of the hosted service. + deployment_name: The name of the deployment. + mode: + If set to Manual, WalkUpgradeDomain must be called to apply the + update. If set to Auto, the Windows Azure platform will + automatically apply the update To each upgrade domain for the + service. Possible values are: Auto, Manual + package_url: + A URL that refers to the location of the service package in the + Blob service. The service package can be located either in a + storage account beneath the same subscription or a Shared Access + Signature (SAS) URI from any storage account. + configuration: + The base-64 encoded service configuration file for the deployment. + label: + A name for the hosted service. The name can be up to 100 characters + in length. It is recommended that the label be unique within the + subscription. The name can be used to identify the hosted service + for your tracking purposes. + force: + Specifies whether the rollback should proceed even when it will + cause local data to be lost from some role instances. True if the + rollback should proceed; otherwise false if the rollback should + fail. + role_to_upgrade: The name of the specific role to upgrade. + extended_properties: + Dictionary containing name/value pairs of storage account + properties. You can have a maximum of 50 extended property + name/value pairs. The maximum length of the Name element is 64 + characters, only alphanumeric characters and underscores are valid + in the Name, and the name must start with a letter. The value has + a maximum length of 255 characters. 
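# A minimal sketch of a manual in-place upgrade using `sms` from the earlier
# sketch: with mode='Manual', nothing progresses until each zero-indexed
# upgrade domain is walked. The package URL, the base-64 encoded
# configuration, and the domain count are placeholders.
base64_config = '<base-64 encoded .cscfg contents>'
result = sms.upgrade_deployment(
    'mysvc', 'mydeployment', 'Manual',
    'http://mystorage.blob.core.windows.net/packages/app.cspkg',
    base64_config, 'upgraded', force=False)
for upgrade_domain in range(2):
    sms.walk_upgrade_domain('mysvc', 'mydeployment', upgrade_domain)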
+ ''' + _validate_not_none('service_name', service_name) + _validate_not_none('deployment_name', deployment_name) + _validate_not_none('mode', mode) + _validate_not_none('package_url', package_url) + _validate_not_none('configuration', configuration) + _validate_not_none('label', label) + _validate_not_none('force', force) + return self._perform_post( + self._get_deployment_path_using_name( + service_name, deployment_name) + '/?comp=upgrade', + _XmlSerializer.upgrade_deployment_to_xml( + mode, + package_url, + configuration, + label, + role_to_upgrade, + force, + extended_properties), + async=True) + + def walk_upgrade_domain(self, service_name, deployment_name, + upgrade_domain): + ''' + Specifies the next upgrade domain to be walked during manual in-place + upgrade or configuration change. + + service_name: Name of the hosted service. + deployment_name: The name of the deployment. + upgrade_domain: + An integer value that identifies the upgrade domain to walk. + Upgrade domains are identified with a zero-based index: the first + upgrade domain has an ID of 0, the second has an ID of 1, and so on. + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('deployment_name', deployment_name) + _validate_not_none('upgrade_domain', upgrade_domain) + return self._perform_post( + self._get_deployment_path_using_name( + service_name, deployment_name) + '/?comp=walkupgradedomain', + _XmlSerializer.walk_upgrade_domain_to_xml( + upgrade_domain), + async=True) + + def rollback_update_or_upgrade(self, service_name, deployment_name, mode, + force): + ''' + Cancels an in progress configuration change (update) or upgrade and + returns the deployment to its state before the upgrade or + configuration change was started. + + service_name: Name of the hosted service. + deployment_name: The name of the deployment. + mode: + Specifies whether the rollback should proceed automatically. + auto - The rollback proceeds without further user input. + manual - You must call the Walk Upgrade Domain operation to + apply the rollback to each upgrade domain. + force: + Specifies whether the rollback should proceed even when it will + cause local data to be lost from some role instances. True if the + rollback should proceed; otherwise false if the rollback should + fail. + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('deployment_name', deployment_name) + _validate_not_none('mode', mode) + _validate_not_none('force', force) + return self._perform_post( + self._get_deployment_path_using_name( + service_name, deployment_name) + '/?comp=rollback', + _XmlSerializer.rollback_upgrade_to_xml( + mode, force), + async=True) + + def reboot_role_instance(self, service_name, deployment_name, + role_instance_name): + ''' + Requests a reboot of a role instance that is running in a deployment. + + service_name: Name of the hosted service. + deployment_name: The name of the deployment. + role_instance_name: The name of the role instance. + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('deployment_name', deployment_name) + _validate_not_none('role_instance_name', role_instance_name) + return self._perform_post( + self._get_deployment_path_using_name( + service_name, deployment_name) + \ + '/roleinstances/' + _str(role_instance_name) + \ + '?comp=reboot', + '', + async=True) + + def reimage_role_instance(self, service_name, deployment_name, + role_instance_name): + ''' + Requests a reimage of a role instance that is running in a deployment. 
+
+        service_name: Name of the hosted service.
+        deployment_name: The name of the deployment.
+        role_instance_name: The name of the role instance.
+        '''
+        _validate_not_none('service_name', service_name)
+        _validate_not_none('deployment_name', deployment_name)
+        _validate_not_none('role_instance_name', role_instance_name)
+        return self._perform_post(
+            self._get_deployment_path_using_name(
+                service_name, deployment_name) + \
+            '/roleinstances/' + _str(role_instance_name) + \
+            '?comp=reimage',
+            '',
+            async=True)
+
+    def rebuild_role_instance(self, service_name, deployment_name,
+                              role_instance_name):
+        '''
+        Reinstalls the operating system on instances of web roles or worker
+        roles and initializes the storage resources that are used by them. If
+        you do not want to initialize storage resources, you can use
+        reimage_role_instance.
+
+        service_name: Name of the hosted service.
+        deployment_name: The name of the deployment.
+        role_instance_name: The name of the role instance.
+        '''
+        _validate_not_none('service_name', service_name)
+        _validate_not_none('deployment_name', deployment_name)
+        _validate_not_none('role_instance_name', role_instance_name)
+        return self._perform_post(
+            self._get_deployment_path_using_name(
+                service_name, deployment_name) + \
+            '/roleinstances/' + _str(role_instance_name) + \
+            '?comp=rebuild&resources=allLocalDrives',
+            '',
+            async=True)
+
+    def delete_role_instances(self, service_name, deployment_name,
+                              role_instance_names):
+        '''
+        Deletes the specified role instances from a deployment.
+
+        service_name: Name of the hosted service.
+        deployment_name: The name of the deployment.
+        role_instance_names: List of role instance names.
+        '''
+        _validate_not_none('service_name', service_name)
+        _validate_not_none('deployment_name', deployment_name)
+        _validate_not_none('role_instance_names', role_instance_names)
+        return self._perform_post(
+            self._get_deployment_path_using_name(
+                service_name, deployment_name) + '/roleinstances/?comp=delete',
+            _XmlSerializer.role_instances_to_xml(role_instance_names),
+            async=True)
+
+    def check_hosted_service_name_availability(self, service_name):
+        '''
+        Checks to see if the specified hosted service name is available, or if
+        it has already been taken.
+
+        service_name: Name of the hosted service.
+        '''
+        _validate_not_none('service_name', service_name)
+        return self._perform_get(
+            '/' + self.subscription_id +
+            '/services/hostedservices/operations/isavailable/' +
+            _str(service_name) + '',
+            AvailabilityResponse)
+
+    #--Operations for service certificates -------------------------------
+    def list_service_certificates(self, service_name):
+        '''
+        Lists all of the service certificates associated with the specified
+        hosted service.
+
+        service_name: Name of the hosted service.
+        '''
+        _validate_not_none('service_name', service_name)
+        return self._perform_get(
+            '/' + self.subscription_id + '/services/hostedservices/' +
+            _str(service_name) + '/certificates',
+            Certificates)
+
+    def get_service_certificate(self, service_name, thumbalgorithm, thumbprint):
+        '''
+        Returns the public data for the specified X.509 certificate associated
+        with a hosted service.
+
+        service_name: Name of the hosted service.
+        thumbalgorithm: The algorithm for the certificate's thumbprint.
+        thumbprint: The hexadecimal representation of the thumbprint.
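# A minimal sketch of the three instance-recovery calls above, using `sms`
# from the earlier sketch, in increasing order of impact: reboot keeps the
# instance as-is, reimage reinstalls the OS, and rebuild also reinitializes
# local storage resources. The instance name is a placeholder.
sms.reboot_role_instance('mysvc', 'mydeployment', 'WebRole1_IN_0')
sms.reimage_role_instance('mysvc', 'mydeployment', 'WebRole1_IN_0')
sms.rebuild_role_instance('mysvc', 'mydeployment', 'WebRole1_IN_0')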
+ ''' + _validate_not_none('service_name', service_name) + _validate_not_none('thumbalgorithm', thumbalgorithm) + _validate_not_none('thumbprint', thumbprint) + return self._perform_get( + '/' + self.subscription_id + '/services/hostedservices/' + + _str(service_name) + '/certificates/' + + _str(thumbalgorithm) + '-' + _str(thumbprint) + '', + Certificate) + + def add_service_certificate(self, service_name, data, certificate_format, + password): + ''' + Adds a certificate to a hosted service. + + service_name: Name of the hosted service. + data: The base-64 encoded form of the pfx file. + certificate_format: + The service certificate format. The only supported value is pfx. + password: The certificate password. + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('data', data) + _validate_not_none('certificate_format', certificate_format) + _validate_not_none('password', password) + return self._perform_post( + '/' + self.subscription_id + '/services/hostedservices/' + + _str(service_name) + '/certificates', + _XmlSerializer.certificate_file_to_xml( + data, certificate_format, password), + async=True) + + def delete_service_certificate(self, service_name, thumbalgorithm, + thumbprint): + ''' + Deletes a service certificate from the certificate store of a hosted + service. + + service_name: Name of the hosted service. + thumbalgorithm: The algorithm for the certificate's thumbprint. + thumbprint: The hexadecimal representation of the thumbprint. + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('thumbalgorithm', thumbalgorithm) + _validate_not_none('thumbprint', thumbprint) + return self._perform_delete( + '/' + self.subscription_id + '/services/hostedservices/' + + _str(service_name) + '/certificates/' + + _str(thumbalgorithm) + '-' + _str(thumbprint), + async=True) + + #--Operations for management certificates ---------------------------- + def list_management_certificates(self): + ''' + The List Management Certificates operation lists and returns basic + information about all of the management certificates associated with + the specified subscription. Management certificates, which are also + known as subscription certificates, authenticate clients attempting to + connect to resources associated with your Windows Azure subscription. + ''' + return self._perform_get('/' + self.subscription_id + '/certificates', + SubscriptionCertificates) + + def get_management_certificate(self, thumbprint): + ''' + The Get Management Certificate operation retrieves information about + the management certificate with the specified thumbprint. Management + certificates, which are also known as subscription certificates, + authenticate clients attempting to connect to resources associated + with your Windows Azure subscription. + + thumbprint: The thumbprint value of the certificate. + ''' + _validate_not_none('thumbprint', thumbprint) + return self._perform_get( + '/' + self.subscription_id + '/certificates/' + _str(thumbprint), + SubscriptionCertificate) + + def add_management_certificate(self, public_key, thumbprint, data): + ''' + The Add Management Certificate operation adds a certificate to the + list of management certificates. Management certificates, which are + also known as subscription certificates, authenticate clients + attempting to connect to resources associated with your Windows Azure + subscription. + + public_key: + A base64 representation of the management certificate public key. 
+ thumbprint: + The thumb print that uniquely identifies the management + certificate. + data: The certificate's raw data in base-64 encoded .cer format. + ''' + _validate_not_none('public_key', public_key) + _validate_not_none('thumbprint', thumbprint) + _validate_not_none('data', data) + return self._perform_post( + '/' + self.subscription_id + '/certificates', + _XmlSerializer.subscription_certificate_to_xml( + public_key, thumbprint, data)) + + def delete_management_certificate(self, thumbprint): + ''' + The Delete Management Certificate operation deletes a certificate from + the list of management certificates. Management certificates, which + are also known as subscription certificates, authenticate clients + attempting to connect to resources associated with your Windows Azure + subscription. + + thumbprint: + The thumb print that uniquely identifies the management + certificate. + ''' + _validate_not_none('thumbprint', thumbprint) + return self._perform_delete( + '/' + self.subscription_id + '/certificates/' + _str(thumbprint)) + + #--Operations for affinity groups ------------------------------------ + def list_affinity_groups(self): + ''' + Lists the affinity groups associated with the specified subscription. + ''' + return self._perform_get( + '/' + self.subscription_id + '/affinitygroups', + AffinityGroups) + + def get_affinity_group_properties(self, affinity_group_name): + ''' + Returns the system properties associated with the specified affinity + group. + + affinity_group_name: The name of the affinity group. + ''' + _validate_not_none('affinity_group_name', affinity_group_name) + return self._perform_get( + '/' + self.subscription_id + '/affinitygroups/' + + _str(affinity_group_name) + '', + AffinityGroup) + + def create_affinity_group(self, name, label, location, description=None): + ''' + Creates a new affinity group for the specified subscription. + + name: A name for the affinity group that is unique to the subscription. + label: + A name for the affinity group. The name can be up to 100 characters + in length. + location: + The data center location where the affinity group will be created. + To list available locations, use the list_location function. + description: + A description for the affinity group. The description can be up to + 1024 characters in length. + ''' + _validate_not_none('name', name) + _validate_not_none('label', label) + _validate_not_none('location', location) + return self._perform_post( + '/' + self.subscription_id + '/affinitygroups', + _XmlSerializer.create_affinity_group_to_xml(name, + label, + description, + location)) + + def update_affinity_group(self, affinity_group_name, label, + description=None): + ''' + Updates the label and/or the description for an affinity group for the + specified subscription. + + affinity_group_name: The name of the affinity group. + label: + A name for the affinity group. The name can be up to 100 characters + in length. + description: + A description for the affinity group. The description can be up to + 1024 characters in length. + ''' + _validate_not_none('affinity_group_name', affinity_group_name) + _validate_not_none('label', label) + return self._perform_put( + '/' + self.subscription_id + '/affinitygroups/' + + _str(affinity_group_name), + _XmlSerializer.update_affinity_group_to_xml(label, description)) + + def delete_affinity_group(self, affinity_group_name): + ''' + Deletes an affinity group in the specified subscription. + + affinity_group_name: The name of the affinity group. 
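# A minimal sketch of the affinity-group calls above, using `sms` from the
# earlier sketch; the group name and location are placeholders
# (list_locations returns the valid locations, per the docstring).
sms.create_affinity_group('mygroup', 'my group', 'West US',
                          description='Keep these services together')
group = sms.get_affinity_group_properties('mygroup')
sms.update_affinity_group('mygroup', 'my group, relabeled')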
+ ''' + _validate_not_none('affinity_group_name', affinity_group_name) + return self._perform_delete('/' + self.subscription_id + \ + '/affinitygroups/' + \ + _str(affinity_group_name)) + + #--Operations for locations ------------------------------------------ + def list_locations(self): + ''' + Lists all of the data center locations that are valid for your + subscription. + ''' + return self._perform_get('/' + self.subscription_id + '/locations', + Locations) + + #--Operations for tracking asynchronous requests --------------------- + def get_operation_status(self, request_id): + ''' + Returns the status of the specified operation. After calling an + asynchronous operation, you can call Get Operation Status to determine + whether the operation has succeeded, failed, or is still in progress. + + request_id: The request ID for the request you wish to track. + ''' + _validate_not_none('request_id', request_id) + return self._perform_get( + '/' + self.subscription_id + '/operations/' + _str(request_id), + Operation) + + #--Operations for retrieving operating system information ------------ + def list_operating_systems(self): + ''' + Lists the versions of the guest operating system that are currently + available in Windows Azure. + ''' + return self._perform_get( + '/' + self.subscription_id + '/operatingsystems', + OperatingSystems) + + def list_operating_system_families(self): + ''' + Lists the guest operating system families available in Windows Azure, + and also lists the operating system versions available for each family. + ''' + return self._perform_get( + '/' + self.subscription_id + '/operatingsystemfamilies', + OperatingSystemFamilies) + + #--Operations for retrieving subscription history -------------------- + def get_subscription(self): + ''' + Returns account and resource allocation information on the specified + subscription. + ''' + return self._perform_get('/' + self.subscription_id + '', + Subscription) + + #--Operations for reserved ip addresses ----------------------------- + def create_reserved_ip_address(self, name, label=None, location=None): + ''' + Reserves an IPv4 address for the specified subscription. + + name: + Required. Specifies the name for the reserved IP address. + label: + Optional. Specifies a label for the reserved IP address. The label + can be up to 100 characters long and can be used for your tracking + purposes. + location: + Required. Specifies the location of the reserved IP address. This + should be the same location that is assigned to the cloud service + containing the deployment that will use the reserved IP address. + To see the available locations, you can use list_locations. + ''' + _validate_not_none('name', name) + return self._perform_post( + self._get_reserved_ip_path(), + _XmlSerializer.create_reserved_ip_to_xml(name, label, location)) + + def delete_reserved_ip_address(self, name): + ''' + Deletes a reserved IP address from the specified subscription. + + name: Required. Name of the reserved IP address. + ''' + _validate_not_none('name', name) + return self._perform_delete(self._get_reserved_ip_path(name)) + + def get_reserved_ip_address(self, name): + ''' + Retrieves information about the specified reserved IP address. + + name: Required. Name of the reserved IP address. + ''' + _validate_not_none('name', name) + return self._perform_get(self._get_reserved_ip_path(name), ReservedIP) + + def list_reserved_ip_addresses(self): + ''' + Lists the IP addresses that have been reserved for the specified + subscription. 
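+
+        Example (illustrative sketch only; ``sms`` is assumed to be an
+        authenticated service management client instance, and the ``name``
+        attribute comes from the ReservedIP model used above):
+            for reserved_ip in sms.list_reserved_ip_addresses():
+                print(reserved_ip.name)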
+ ''' + return self._perform_get(self._get_reserved_ip_path(), ReservedIPs) + + #--Operations for virtual machines ----------------------------------- + def get_role(self, service_name, deployment_name, role_name): + ''' + Retrieves the specified virtual machine. + + service_name: The name of the service. + deployment_name: The name of the deployment. + role_name: The name of the role. + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('deployment_name', deployment_name) + _validate_not_none('role_name', role_name) + return self._perform_get( + self._get_role_path(service_name, deployment_name, role_name), + PersistentVMRole) + + def create_virtual_machine_deployment(self, service_name, deployment_name, + deployment_slot, label, role_name, + system_config, os_virtual_hard_disk, + network_config=None, + availability_set_name=None, + data_virtual_hard_disks=None, + role_size=None, + role_type='PersistentVMRole', + virtual_network_name=None, + resource_extension_references=None, + provision_guest_agent=None, + vm_image_name=None, + media_location=None, + dns_servers=None, + reserved_ip_name=None): + ''' + Provisions a virtual machine based on the supplied configuration. + + service_name: Name of the hosted service. + deployment_name: + The name for the deployment. The deployment name must be unique + among other deployments for the hosted service. + deployment_slot: + The environment to which the hosted service is deployed. Valid + values are: staging, production + label: + Specifies an identifier for the deployment. The label can be up to + 100 characters long. The label can be used for tracking purposes. + role_name: The name of the role. + system_config: + Contains the metadata required to provision a virtual machine from + a Windows or Linux OS image. Use an instance of + WindowsConfigurationSet or LinuxConfigurationSet. + os_virtual_hard_disk: + Contains the parameters Windows Azure uses to create the operating + system disk for the virtual machine. If you are creating a Virtual + Machine by using a VM Image, this parameter is not used. + network_config: + Encapsulates the metadata required to create the virtual network + configuration for a virtual machine. If you do not include a + network configuration set you will not be able to access the VM + through VIPs over the internet. If your virtual machine belongs to + a virtual network you can not specify which subnet address space + it resides under. + availability_set_name: + Specifies the name of an availability set to which to add the + virtual machine. This value controls the virtual machine + allocation in the Windows Azure environment. Virtual machines + specified in the same availability set are allocated to different + nodes to maximize availability. + data_virtual_hard_disks: + Contains the parameters Windows Azure uses to create a data disk + for a virtual machine. + role_size: + The size of the virtual machine to allocate. The default value is + Small. Possible values are: ExtraSmall, Small, Medium, Large, + ExtraLarge. The specified value must be compatible with the disk + selected in the OSVirtualHardDisk values. + role_type: + The type of the role for the virtual machine. The only supported + value is PersistentVMRole. + virtual_network_name: + Specifies the name of an existing virtual network to which the + deployment will belong. + resource_extension_references: + Optional. Contains a collection of resource extensions that are to + be installed on the Virtual Machine. 
This element is used if + provision_guest_agent is set to True. + provision_guest_agent: + Optional. Indicates whether the VM Agent is installed on the + Virtual Machine. To run a resource extension in a Virtual Machine, + this service must be installed. + vm_image_name: + Optional. Specifies the name of the VM Image that is to be used to + create the Virtual Machine. If this is specified, the + system_config and network_config parameters are not used. + media_location: + Optional. Required if the Virtual Machine is being created from a + published VM Image. Specifies the location of the VHD file that is + created when VMImageName specifies a published VM Image. + dns_servers: + Optional. List of DNS servers (use DnsServer class) to associate + with the Virtual Machine. + reserved_ip_name: + Optional. Specifies the name of a reserved IP address that is to be + assigned to the deployment. You must run create_reserved_ip_address + before you can assign the address to the deployment using this + element. + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('deployment_name', deployment_name) + _validate_not_none('deployment_slot', deployment_slot) + _validate_not_none('label', label) + _validate_not_none('role_name', role_name) + return self._perform_post( + self._get_deployment_path_using_name(service_name), + _XmlSerializer.virtual_machine_deployment_to_xml( + deployment_name, + deployment_slot, + label, + role_name, + system_config, + os_virtual_hard_disk, + role_type, + network_config, + availability_set_name, + data_virtual_hard_disks, + role_size, + virtual_network_name, + resource_extension_references, + provision_guest_agent, + vm_image_name, + media_location, + dns_servers, + reserved_ip_name), + async=True) + + def add_role(self, service_name, deployment_name, role_name, system_config, + os_virtual_hard_disk, network_config=None, + availability_set_name=None, data_virtual_hard_disks=None, + role_size=None, role_type='PersistentVMRole', + resource_extension_references=None, + provision_guest_agent=None, vm_image_name=None, + media_location=None): + ''' + Adds a virtual machine to an existing deployment. + + service_name: The name of the service. + deployment_name: The name of the deployment. + role_name: The name of the role. + system_config: + Contains the metadata required to provision a virtual machine from + a Windows or Linux OS image. Use an instance of + WindowsConfigurationSet or LinuxConfigurationSet. + os_virtual_hard_disk: + Contains the parameters Windows Azure uses to create the operating + system disk for the virtual machine. If you are creating a Virtual + Machine by using a VM Image, this parameter is not used. + network_config: + Encapsulates the metadata required to create the virtual network + configuration for a virtual machine. If you do not include a + network configuration set you will not be able to access the VM + through VIPs over the internet. If your virtual machine belongs to + a virtual network you can not specify which subnet address space + it resides under. + availability_set_name: + Specifies the name of an availability set to which to add the + virtual machine. This value controls the virtual machine allocation + in the Windows Azure environment. Virtual machines specified in the + same availability set are allocated to different nodes to maximize + availability. + data_virtual_hard_disks: + Contains the parameters Windows Azure uses to create a data disk + for a virtual machine. 
+ role_size: + The size of the virtual machine to allocate. The default value is + Small. Possible values are: ExtraSmall, Small, Medium, Large, + ExtraLarge. The specified value must be compatible with the disk + selected in the OSVirtualHardDisk values. + role_type: + The type of the role for the virtual machine. The only supported + value is PersistentVMRole. + resource_extension_references: + Optional. Contains a collection of resource extensions that are to + be installed on the Virtual Machine. This element is used if + provision_guest_agent is set to True. + provision_guest_agent: + Optional. Indicates whether the VM Agent is installed on the + Virtual Machine. To run a resource extension in a Virtual Machine, + this service must be installed. + vm_image_name: + Optional. Specifies the name of the VM Image that is to be used to + create the Virtual Machine. If this is specified, the + system_config and network_config parameters are not used. + media_location: + Optional. Required if the Virtual Machine is being created from a + published VM Image. Specifies the location of the VHD file that is + created when VMImageName specifies a published VM Image. + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('deployment_name', deployment_name) + _validate_not_none('role_name', role_name) + return self._perform_post( + self._get_role_path(service_name, deployment_name), + _XmlSerializer.add_role_to_xml( + role_name, + system_config, + os_virtual_hard_disk, + role_type, + network_config, + availability_set_name, + data_virtual_hard_disks, + role_size, + resource_extension_references, + provision_guest_agent, + vm_image_name, + media_location), + async=True) + + def update_role(self, service_name, deployment_name, role_name, + os_virtual_hard_disk=None, network_config=None, + availability_set_name=None, data_virtual_hard_disks=None, + role_size=None, role_type='PersistentVMRole', + resource_extension_references=None, + provision_guest_agent=None): + ''' + Updates the specified virtual machine. + + service_name: The name of the service. + deployment_name: The name of the deployment. + role_name: The name of the role. + os_virtual_hard_disk: + Contains the parameters Windows Azure uses to create the operating + system disk for the virtual machine. + network_config: + Encapsulates the metadata required to create the virtual network + configuration for a virtual machine. If you do not include a + network configuration set you will not be able to access the VM + through VIPs over the internet. If your virtual machine belongs to + a virtual network you can not specify which subnet address space + it resides under. + availability_set_name: + Specifies the name of an availability set to which to add the + virtual machine. This value controls the virtual machine allocation + in the Windows Azure environment. Virtual machines specified in the + same availability set are allocated to different nodes to maximize + availability. + data_virtual_hard_disks: + Contains the parameters Windows Azure uses to create a data disk + for a virtual machine. + role_size: + The size of the virtual machine to allocate. The default value is + Small. Possible values are: ExtraSmall, Small, Medium, Large, + ExtraLarge. The specified value must be compatible with the disk + selected in the OSVirtualHardDisk values. + role_type: + The type of the role for the virtual machine. The only supported + value is PersistentVMRole. + resource_extension_references: + Optional. 
Contains a collection of resource extensions that are to + be installed on the Virtual Machine. This element is used if + provision_guest_agent is set to True. + provision_guest_agent: + Optional. Indicates whether the VM Agent is installed on the + Virtual Machine. To run a resource extension in a Virtual Machine, + this service must be installed. + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('deployment_name', deployment_name) + _validate_not_none('role_name', role_name) + return self._perform_put( + self._get_role_path(service_name, deployment_name, role_name), + _XmlSerializer.update_role_to_xml( + role_name, + os_virtual_hard_disk, + role_type, + network_config, + availability_set_name, + data_virtual_hard_disks, + role_size, + resource_extension_references, + provision_guest_agent), + async=True) + + def delete_role(self, service_name, deployment_name, role_name): + ''' + Deletes the specified virtual machine. + + service_name: The name of the service. + deployment_name: The name of the deployment. + role_name: The name of the role. + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('deployment_name', deployment_name) + _validate_not_none('role_name', role_name) + return self._perform_delete( + self._get_role_path(service_name, deployment_name, role_name), + async=True) + + def capture_role(self, service_name, deployment_name, role_name, + post_capture_action, target_image_name, + target_image_label, provisioning_configuration=None): + ''' + The Capture Role operation captures a virtual machine image to your + image gallery. From the captured image, you can create additional + customized virtual machines. + + service_name: The name of the service. + deployment_name: The name of the deployment. + role_name: The name of the role. + post_capture_action: + Specifies the action after capture operation completes. Possible + values are: Delete, Reprovision. + target_image_name: + Specifies the image name of the captured virtual machine. + target_image_label: + Specifies the friendly name of the captured virtual machine. + provisioning_configuration: + Use an instance of WindowsConfigurationSet or LinuxConfigurationSet. + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('deployment_name', deployment_name) + _validate_not_none('role_name', role_name) + _validate_not_none('post_capture_action', post_capture_action) + _validate_not_none('target_image_name', target_image_name) + _validate_not_none('target_image_label', target_image_label) + return self._perform_post( + self._get_role_instance_operations_path( + service_name, deployment_name, role_name), + _XmlSerializer.capture_role_to_xml( + post_capture_action, + target_image_name, + target_image_label, + provisioning_configuration), + async=True) + + def start_role(self, service_name, deployment_name, role_name): + ''' + Starts the specified virtual machine. + + service_name: The name of the service. + deployment_name: The name of the deployment. + role_name: The name of the role. + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('deployment_name', deployment_name) + _validate_not_none('role_name', role_name) + return self._perform_post( + self._get_role_instance_operations_path( + service_name, deployment_name, role_name), + _XmlSerializer.start_role_operation_to_xml(), + async=True) + + def start_roles(self, service_name, deployment_name, role_names): + ''' + Starts the specified virtual machines. 
+ + service_name: The name of the service. + deployment_name: The name of the deployment. + role_names: The names of the roles, as an enumerable of strings. + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('deployment_name', deployment_name) + _validate_not_none('role_names', role_names) + return self._perform_post( + self._get_roles_operations_path(service_name, deployment_name), + _XmlSerializer.start_roles_operation_to_xml(role_names), + async=True) + + def restart_role(self, service_name, deployment_name, role_name): + ''' + Restarts the specified virtual machine. + + service_name: The name of the service. + deployment_name: The name of the deployment. + role_name: The name of the role. + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('deployment_name', deployment_name) + _validate_not_none('role_name', role_name) + return self._perform_post( + self._get_role_instance_operations_path( + service_name, deployment_name, role_name), + _XmlSerializer.restart_role_operation_to_xml( + ), + async=True) + + def shutdown_role(self, service_name, deployment_name, role_name, + post_shutdown_action='Stopped'): + ''' + Shuts down the specified virtual machine. + + service_name: The name of the service. + deployment_name: The name of the deployment. + role_name: The name of the role. + post_shutdown_action: + Specifies how the Virtual Machine should be shut down. Values are: + Stopped + Shuts down the Virtual Machine but retains the compute + resources. You will continue to be billed for the resources + that the stopped machine uses. + StoppedDeallocated + Shuts down the Virtual Machine and releases the compute + resources. You are not billed for the compute resources that + this Virtual Machine uses. If a static Virtual Network IP + address is assigned to the Virtual Machine, it is reserved. + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('deployment_name', deployment_name) + _validate_not_none('role_name', role_name) + _validate_not_none('post_shutdown_action', post_shutdown_action) + return self._perform_post( + self._get_role_instance_operations_path( + service_name, deployment_name, role_name), + _XmlSerializer.shutdown_role_operation_to_xml(post_shutdown_action), + async=True) + + def shutdown_roles(self, service_name, deployment_name, role_names, + post_shutdown_action='Stopped'): + ''' + Shuts down the specified virtual machines. + + service_name: The name of the service. + deployment_name: The name of the deployment. + role_names: The names of the roles, as an enumerable of strings. + post_shutdown_action: + Specifies how the Virtual Machine should be shut down. Values are: + Stopped + Shuts down the Virtual Machine but retains the compute + resources. You will continue to be billed for the resources + that the stopped machine uses. + StoppedDeallocated + Shuts down the Virtual Machine and releases the compute + resources. You are not billed for the compute resources that + this Virtual Machine uses. If a static Virtual Network IP + address is assigned to the Virtual Machine, it is reserved. 
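+
+        Example (illustrative sketch only; ``sms`` stands for an
+        authenticated service management client instance and the service,
+        deployment and role names are placeholders):
+            sms.shutdown_roles('mysvc', 'mysvc-prod', ['web1', 'web2'],
+                               post_shutdown_action='StoppedDeallocated')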
+ ''' + _validate_not_none('service_name', service_name) + _validate_not_none('deployment_name', deployment_name) + _validate_not_none('role_names', role_names) + _validate_not_none('post_shutdown_action', post_shutdown_action) + return self._perform_post( + self._get_roles_operations_path(service_name, deployment_name), + _XmlSerializer.shutdown_roles_operation_to_xml( + role_names, post_shutdown_action), + async=True) + + def add_dns_server(self, service_name, deployment_name, dns_server_name, address): + ''' + Adds a DNS server definition to an existing deployment. + + service_name: The name of the service. + deployment_name: The name of the deployment. + dns_server_name: Specifies the name of the DNS server. + address: Specifies the IP address of the DNS server. + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('deployment_name', deployment_name) + _validate_not_none('dns_server_name', dns_server_name) + _validate_not_none('address', address) + return self._perform_post( + self._get_dns_server_path(service_name, deployment_name), + _XmlSerializer.dns_server_to_xml(dns_server_name, address), + async=True) + + def update_dns_server(self, service_name, deployment_name, dns_server_name, address): + ''' + Updates the ip address of a DNS server. + + service_name: The name of the service. + deployment_name: The name of the deployment. + dns_server_name: Specifies the name of the DNS server. + address: Specifies the IP address of the DNS server. + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('deployment_name', deployment_name) + _validate_not_none('dns_server_name', dns_server_name) + _validate_not_none('address', address) + return self._perform_put( + self._get_dns_server_path(service_name, + deployment_name, + dns_server_name), + _XmlSerializer.dns_server_to_xml(dns_server_name, address), + async=True) + + def delete_dns_server(self, service_name, deployment_name, dns_server_name): + ''' + Deletes a DNS server from a deployment. + + service_name: The name of the service. + deployment_name: The name of the deployment. + dns_server_name: Name of the DNS server that you want to delete. + ''' + _validate_not_none('service_name', service_name) + _validate_not_none('deployment_name', deployment_name) + _validate_not_none('dns_server_name', dns_server_name) + return self._perform_delete( + self._get_dns_server_path(service_name, + deployment_name, + dns_server_name), + async=True) + + def list_resource_extensions(self): + ''' + Lists the resource extensions that are available to add to a + Virtual Machine. + ''' + return self._perform_get(self._get_resource_extensions_path(), + ResourceExtensions) + + def list_resource_extension_versions(self, publisher_name, extension_name): + ''' + Lists the versions of a resource extension that are available to add + to a Virtual Machine. + + publisher_name: Name of the resource extension publisher. + extension_name: Name of the resource extension. 
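+
+        Example (illustrative sketch only; ``sms`` is assumed to be an
+        authenticated service management client instance, and the
+        publisher and extension names are placeholders):
+            versions = sms.list_resource_extension_versions(
+                'Contoso.Extensions', 'SampleExtension')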
+        '''
+        return self._perform_get(self._get_resource_extension_versions_path(
+            publisher_name, extension_name),
+            ResourceExtensions)
+
+    #--Operations for virtual machine images -----------------------------
+    def capture_vm_image(self, service_name, deployment_name, role_name, options):
+        '''
+        Creates a copy of the operating system virtual hard disk (VHD) and all
+        of the data VHDs that are associated with the Virtual Machine, saves
+        the VHD copies in the same storage location as the original VHDs, and
+        registers the copies as a VM Image in the image repository that is
+        associated with the specified subscription.
+
+        service_name: The name of the service.
+        deployment_name: The name of the deployment.
+        role_name: The name of the role.
+        options: An instance of CaptureRoleAsVMImage class.
+        options.os_state:
+            Required. Specifies the state of the operating system in the image.
+            Possible values are: Generalized, Specialized
+            A Virtual Machine that is fully configured and running contains a
+            Specialized operating system. A Virtual Machine on which the
+            Sysprep command has been run with the generalize option contains a
+            Generalized operating system. If you capture an image from a
+            generalized Virtual Machine, the machine is deleted after the image
+            is captured. It is recommended that all Virtual Machines are shut
+            down before capturing an image.
+        options.vm_image_name:
+            Required. Specifies the name of the VM Image.
+        options.vm_image_label:
+            Required. Specifies the label of the VM Image.
+        options.description:
+            Optional. Specifies the description of the VM Image.
+        options.language:
+            Optional. Specifies the language of the VM Image.
+        options.image_family:
+            Optional. Specifies a value that can be used to group VM Images.
+        options.recommended_vm_size:
+            Optional. Specifies the size to use for the Virtual Machine that
+            is created from the VM Image.
+        '''
+        _validate_not_none('service_name', service_name)
+        _validate_not_none('deployment_name', deployment_name)
+        _validate_not_none('role_name', role_name)
+        _validate_not_none('options', options)
+        _validate_not_none('options.os_state', options.os_state)
+        _validate_not_none('options.vm_image_name', options.vm_image_name)
+        _validate_not_none('options.vm_image_label', options.vm_image_label)
+        return self._perform_post(
+            self._get_capture_vm_image_path(service_name, deployment_name, role_name),
+            _XmlSerializer.capture_vm_image_to_xml(options),
+            async=True)
+
+    def create_vm_image(self, vm_image):
+        '''
+        Creates a VM Image in the image repository that is associated with the
+        specified subscription using a specified set of virtual hard disks.
+
+        vm_image: An instance of VMImage class.
+        vm_image.name: Required. Specifies the name of the image.
+        vm_image.label: Required. Specifies an identifier for the image.
+        vm_image.description: Optional. Specifies the description of the image.
+        vm_image.os_disk_configuration:
+            Required. Specifies configuration information for the operating
+            system disk that is associated with the image.
+        vm_image.os_disk_configuration.host_caching:
+            Optional. Specifies the caching behavior of the operating system disk.
+            Possible values are: None, ReadOnly, ReadWrite
+        vm_image.os_disk_configuration.os_state:
+            Required. Specifies the state of the operating system in the image.
+            Possible values are: Generalized, Specialized
+            A Virtual Machine that is fully configured and running contains a
+            Specialized operating system.
A Virtual Machine on which the + Sysprep command has been run with the generalize option contains a + Generalized operating system. + vm_image.os_disk_configuration.os: + Required. Specifies the operating system type of the image. + vm_image.os_disk_configuration.media_link: + Required. Specifies the location of the blob in Windows Azure + storage. The blob location belongs to a storage account in the + subscription specified by the <subscription-id> value in the + operation call. + vm_image.data_disk_configurations: + Optional. Specifies configuration information for the data disks + that are associated with the image. A VM Image might not have data + disks associated with it. + vm_image.data_disk_configurations[].host_caching: + Optional. Specifies the caching behavior of the data disk. + Possible values are: None, ReadOnly, ReadWrite + vm_image.data_disk_configurations[].lun: + Optional if the lun for the disk is 0. Specifies the Logical Unit + Number (LUN) for the data disk. + vm_image.data_disk_configurations[].media_link: + Required. Specifies the location of the blob in Windows Azure + storage. The blob location belongs to a storage account in the + subscription specified by the <subscription-id> value in the + operation call. + vm_image.data_disk_configurations[].logical_size_in_gb: + Required. Specifies the size, in GB, of the data disk. + vm_image.language: Optional. Specifies the language of the image. + vm_image.image_family: + Optional. Specifies a value that can be used to group VM Images. + vm_image.recommended_vm_size: + Optional. Specifies the size to use for the Virtual Machine that + is created from the VM Image. + vm_image.eula: + Optional. Specifies the End User License Agreement that is + associated with the image. The value for this element is a string, + but it is recommended that the value be a URL that points to a EULA. + vm_image.icon_uri: + Optional. Specifies the URI to the icon that is displayed for the + image in the Management Portal. + vm_image.small_icon_uri: + Optional. Specifies the URI to the small icon that is displayed for + the image in the Management Portal. + vm_image.privacy_uri: + Optional. Specifies the URI that points to a document that contains + the privacy policy related to the image. + vm_image.published_date: + Optional. Specifies the date when the image was added to the image + repository. + vm_image.show_in_gui: + Optional. Indicates whether the VM Images should be listed in the + portal. + ''' + _validate_not_none('vm_image', vm_image) + _validate_not_none('vm_image.name', vm_image.name) + _validate_not_none('vm_image.label', vm_image.label) + _validate_not_none('vm_image.os_disk_configuration.os_state', + vm_image.os_disk_configuration.os_state) + _validate_not_none('vm_image.os_disk_configuration.os', + vm_image.os_disk_configuration.os) + _validate_not_none('vm_image.os_disk_configuration.media_link', + vm_image.os_disk_configuration.media_link) + return self._perform_post( + self._get_vm_image_path(), + _XmlSerializer.create_vm_image_to_xml(vm_image), + async=True) + + def delete_vm_image(self, vm_image_name, delete_vhd=False): + ''' + Deletes the specified VM Image from the image repository that is + associated with the specified subscription. + + vm_image_name: The name of the image. + delete_vhd: Deletes the underlying vhd blob in Azure storage. 
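+
+        Example (illustrative sketch only; ``sms`` stands for an
+        authenticated service management client instance, and the
+        asynchronous result is assumed to expose ``request_id`` as other
+        async operations in this module do):
+            result = sms.delete_vm_image('my-vm-image', delete_vhd=True)
+            status = sms.get_operation_status(result.request_id)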
+ ''' + _validate_not_none('vm_image_name', vm_image_name) + path = self._get_vm_image_path(vm_image_name) + if delete_vhd: + path += '?comp=media' + return self._perform_delete(path, async=True) + + def list_vm_images(self, location=None, publisher=None, category=None): + ''' + Retrieves a list of the VM Images from the image repository that is + associated with the specified subscription. + ''' + path = self._get_vm_image_path() + query = '' + if location: + query += '&location=' + location + if publisher: + query += '&publisher=' + publisher + if category: + query += '&category=' + category + if query: + path = path + '?' + query.lstrip('&') + return self._perform_get(path, VMImages) + + def update_vm_image(self, vm_image_name, vm_image): + ''' + Updates a VM Image in the image repository that is associated with the + specified subscription. + + vm_image_name: Name of image to update. + vm_image: An instance of VMImage class. + vm_image.label: Optional. Specifies an identifier for the image. + vm_image.os_disk_configuration: + Required. Specifies configuration information for the operating + system disk that is associated with the image. + vm_image.os_disk_configuration.host_caching: + Optional. Specifies the caching behavior of the operating system disk. + Possible values are: None, ReadOnly, ReadWrite + vm_image.data_disk_configurations: + Optional. Specifies configuration information for the data disks + that are associated with the image. A VM Image might not have data + disks associated with it. + vm_image.data_disk_configurations[].name: + Required. Specifies the name of the data disk. + vm_image.data_disk_configurations[].host_caching: + Optional. Specifies the caching behavior of the data disk. + Possible values are: None, ReadOnly, ReadWrite + vm_image.data_disk_configurations[].lun: + Optional if the lun for the disk is 0. Specifies the Logical Unit + Number (LUN) for the data disk. + vm_image.description: Optional. Specifies the description of the image. + vm_image.language: Optional. Specifies the language of the image. + vm_image.image_family: + Optional. Specifies a value that can be used to group VM Images. + vm_image.recommended_vm_size: + Optional. Specifies the size to use for the Virtual Machine that + is created from the VM Image. + vm_image.eula: + Optional. Specifies the End User License Agreement that is + associated with the image. The value for this element is a string, + but it is recommended that the value be a URL that points to a EULA. + vm_image.icon_uri: + Optional. Specifies the URI to the icon that is displayed for the + image in the Management Portal. + vm_image.small_icon_uri: + Optional. Specifies the URI to the small icon that is displayed for + the image in the Management Portal. + vm_image.privacy_uri: + Optional. Specifies the URI that points to a document that contains + the privacy policy related to the image. + vm_image.published_date: + Optional. Specifies the date when the image was added to the image + repository. + vm_image.show_in_gui: + Optional. Indicates whether the VM Images should be listed in the + portal. + ''' + _validate_not_none('vm_image_name', vm_image_name) + _validate_not_none('vm_image', vm_image) + return self._perform_put(self._get_vm_image_path(vm_image_name), + _XmlSerializer.update_vm_image_to_xml(vm_image), + async=True) + + #--Operations for operating system images ---------------------------- + def list_os_images(self): + ''' + Retrieves a list of the OS images from the image repository. 
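+
+        Example (illustrative sketch only; ``sms`` is assumed to be an
+        authenticated service management client instance):
+            for image in sms.list_os_images():
+                print(image.name)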
+        '''
+        return self._perform_get(self._get_image_path(),
+                                 Images)
+
+    def get_os_image(self, image_name):
+        '''
+        Retrieves an OS image from the image repository.
+        '''
+        return self._perform_get(self._get_image_path(image_name),
+                                 OSImage)
+
+    def add_os_image(self, label, media_link, name, os):
+        '''
+        Adds an OS image that is currently stored in a storage account in your
+        subscription to the image repository.
+
+        label: Specifies the friendly name of the image.
+        media_link:
+            Specifies the location of the blob in Windows Azure blob store
+            where the media for the image is located. The blob location must
+            belong to a storage account in the subscription specified by the
+            <subscription-id> value in the operation call. Example:
+            http://example.blob.core.windows.net/disks/mydisk.vhd
+        name:
+            Specifies a name for the OS image that Windows Azure uses to
+            identify the image when creating one or more virtual machines.
+        os:
+            The operating system type of the OS image. Possible values are:
+            Linux, Windows
+        '''
+        _validate_not_none('label', label)
+        _validate_not_none('media_link', media_link)
+        _validate_not_none('name', name)
+        _validate_not_none('os', os)
+        return self._perform_post(self._get_image_path(),
+                                  _XmlSerializer.os_image_to_xml(
+                                      label, media_link, name, os),
+                                  async=True)
+
+    def update_os_image(self, image_name, label, media_link, name, os):
+        '''
+        Updates an OS image that is in your image repository.
+
+        image_name: The name of the image to update.
+        label:
+            Specifies the friendly name of the image to be updated. You cannot
+            use this operation to update images provided by the Windows Azure
+            platform.
+        media_link:
+            Specifies the location of the blob in Windows Azure blob store
+            where the media for the image is located. The blob location must
+            belong to a storage account in the subscription specified by the
+            <subscription-id> value in the operation call. Example:
+            http://example.blob.core.windows.net/disks/mydisk.vhd
+        name:
+            Specifies a name for the OS image that Windows Azure uses to
+            identify the image when creating one or more VM Roles.
+        os:
+            The operating system type of the OS image. Possible values are:
+            Linux, Windows
+        '''
+        _validate_not_none('image_name', image_name)
+        _validate_not_none('label', label)
+        _validate_not_none('media_link', media_link)
+        _validate_not_none('name', name)
+        _validate_not_none('os', os)
+        return self._perform_put(self._get_image_path(image_name),
+                                 _XmlSerializer.os_image_to_xml(
+                                     label, media_link, name, os),
+                                 async=True)
+
+    def delete_os_image(self, image_name, delete_vhd=False):
+        '''
+        Deletes the specified OS image from your image repository.
+
+        image_name: The name of the image.
+        delete_vhd: Deletes the underlying vhd blob in Azure storage.
+        '''
+        _validate_not_none('image_name', image_name)
+        path = self._get_image_path(image_name)
+        if delete_vhd:
+            path += '?comp=media'
+        return self._perform_delete(path, async=True)
+
+    #--Operations for virtual machine disks ------------------------------
+    def get_data_disk(self, service_name, deployment_name, role_name, lun):
+        '''
+        Retrieves the specified data disk from a virtual machine.
+
+        service_name: The name of the service.
+        deployment_name: The name of the deployment.
+        role_name: The name of the role.
+        lun: The Logical Unit Number (LUN) for the disk.
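+
+        Example (illustrative sketch only; ``sms`` stands for an
+        authenticated service management client instance and the names
+        are placeholders; LUN 0 is the first data disk slot):
+            disk = sms.get_data_disk('mysvc', 'mysvc-prod', 'web1', 0)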
+        '''
+        _validate_not_none('service_name', service_name)
+        _validate_not_none('deployment_name', deployment_name)
+        _validate_not_none('role_name', role_name)
+        _validate_not_none('lun', lun)
+        return self._perform_get(
+            self._get_data_disk_path(
+                service_name, deployment_name, role_name, lun),
+            DataVirtualHardDisk)
+
+    def add_data_disk(self, service_name, deployment_name, role_name, lun,
+                      host_caching=None, media_link=None, disk_label=None,
+                      disk_name=None, logical_disk_size_in_gb=None,
+                      source_media_link=None):
+        '''
+        Adds a data disk to a virtual machine.
+
+        service_name: The name of the service.
+        deployment_name: The name of the deployment.
+        role_name: The name of the role.
+        lun:
+            Specifies the Logical Unit Number (LUN) for the disk. The LUN
+            specifies the slot in which the data drive appears when mounted
+            for usage by the virtual machine. Valid LUN values are 0 through 15.
+        host_caching:
+            Specifies the platform caching behavior of data disk blob for
+            read/write efficiency. The default value is ReadOnly. Possible
+            values are: None, ReadOnly, ReadWrite
+        media_link:
+            Specifies the location of the blob in Windows Azure blob store
+            where the media for the disk is located. The blob location must
+            belong to the storage account in the subscription specified by the
+            <subscription-id> value in the operation call. Example:
+            http://example.blob.core.windows.net/disks/mydisk.vhd
+        disk_label:
+            Specifies the description of the data disk. When you attach a disk,
+            either by directly referencing a media using the MediaLink element
+            or specifying the target disk size, you can use the DiskLabel
+            element to customize the name property of the target data disk.
+        disk_name:
+            Specifies the name of the disk. Windows Azure uses the specified
+            disk to create the data disk for the machine and populates this
+            field with the disk name.
+        logical_disk_size_in_gb:
+            Specifies the size, in GB, of an empty disk to be attached to the
+            role. The disk can be created as part of disk attach or create VM
+            role call by specifying the value for this property. Windows Azure
+            creates the empty disk based on size preference and attaches the
+            newly created disk to the Role.
+        source_media_link:
+            Specifies the location of a blob in account storage which is
+            mounted as a data disk when the virtual machine is created.
+        '''
+        _validate_not_none('service_name', service_name)
+        _validate_not_none('deployment_name', deployment_name)
+        _validate_not_none('role_name', role_name)
+        _validate_not_none('lun', lun)
+        return self._perform_post(
+            self._get_data_disk_path(service_name, deployment_name, role_name),
+            _XmlSerializer.data_virtual_hard_disk_to_xml(
+                host_caching,
+                disk_label,
+                disk_name,
+                lun,
+                logical_disk_size_in_gb,
+                media_link,
+                source_media_link),
+            async=True)
+
+    def update_data_disk(self, service_name, deployment_name, role_name, lun,
+                         host_caching=None, media_link=None, updated_lun=None,
+                         disk_label=None, disk_name=None,
+                         logical_disk_size_in_gb=None):
+        '''
+        Updates the specified data disk attached to the specified virtual
+        machine.
+
+        service_name: The name of the service.
+        deployment_name: The name of the deployment.
+        role_name: The name of the role.
+        lun:
+            Specifies the Logical Unit Number (LUN) for the disk. The LUN
+            specifies the slot in which the data drive appears when mounted
+            for usage by the virtual machine. Valid LUN values are 0 through
+            15.
+        host_caching:
+            Specifies the platform caching behavior of data disk blob for
+            read/write efficiency. The default value is ReadOnly. Possible
+            values are: None, ReadOnly, ReadWrite
+        media_link:
+            Specifies the location of the blob in Windows Azure blob store
+            where the media for the disk is located. The blob location must
+            belong to the storage account in the subscription specified by
+            the <subscription-id> value in the operation call. Example:
+            http://example.blob.core.windows.net/disks/mydisk.vhd
+        updated_lun:
+            Specifies the Logical Unit Number (LUN) for the disk. The LUN
+            specifies the slot in which the data drive appears when mounted
+            for usage by the virtual machine. Valid LUN values are 0 through 15.
+        disk_label:
+            Specifies the description of the data disk. When you attach a disk,
+            either by directly referencing a media using the MediaLink element
+            or specifying the target disk size, you can use the DiskLabel
+            element to customize the name property of the target data disk.
+        disk_name:
+            Specifies the name of the disk. Windows Azure uses the specified
+            disk to create the data disk for the machine and populates this
+            field with the disk name.
+        logical_disk_size_in_gb:
+            Specifies the size, in GB, of an empty disk to be attached to the
+            role. The disk can be created as part of disk attach or create VM
+            role call by specifying the value for this property. Windows Azure
+            creates the empty disk based on size preference and attaches the
+            newly created disk to the Role.
+        '''
+        _validate_not_none('service_name', service_name)
+        _validate_not_none('deployment_name', deployment_name)
+        _validate_not_none('role_name', role_name)
+        _validate_not_none('lun', lun)
+        return self._perform_put(
+            self._get_data_disk_path(
+                service_name, deployment_name, role_name, lun),
+            _XmlSerializer.data_virtual_hard_disk_to_xml(
+                host_caching,
+                disk_label,
+                disk_name,
+                updated_lun,
+                logical_disk_size_in_gb,
+                media_link,
+                None),
+            async=True)
+
+    def delete_data_disk(self, service_name, deployment_name, role_name, lun, delete_vhd=False):
+        '''
+        Removes the specified data disk from a virtual machine.
+
+        service_name: The name of the service.
+        deployment_name: The name of the deployment.
+        role_name: The name of the role.
+        lun: The Logical Unit Number (LUN) for the disk.
+        delete_vhd: Deletes the underlying vhd blob in Azure storage.
+        '''
+        _validate_not_none('service_name', service_name)
+        _validate_not_none('deployment_name', deployment_name)
+        _validate_not_none('role_name', role_name)
+        _validate_not_none('lun', lun)
+        path = self._get_data_disk_path(service_name, deployment_name, role_name, lun)
+        if delete_vhd:
+            path += '?comp=media'
+        return self._perform_delete(path, async=True)
+
+    #--Operations for virtual machine disks ------------------------------
+    def list_disks(self):
+        '''
+        Retrieves a list of the disks in your image repository.
+        '''
+        return self._perform_get(self._get_disk_path(),
+                                 Disks)
+
+    def get_disk(self, disk_name):
+        '''
+        Retrieves a disk from your image repository.
+        '''
+        return self._perform_get(self._get_disk_path(disk_name),
+                                 Disk)
+
+    def add_disk(self, has_operating_system, label, media_link, name, os):
+        '''
+        Adds a disk to the user image repository. The disk can be an OS disk
+        or a data disk.
+
+        has_operating_system:
+            Specifies whether the disk contains an operating system. Only a
+            disk with an operating system installed can be mounted as OS Drive.
+        label: Specifies the description of the disk.
+        media_link:
+            Specifies the location of the blob in Windows Azure blob store
+            where the media for the disk is located. The blob location must
+            belong to the storage account in the current subscription specified
+            by the <subscription-id> value in the operation call. Example:
+            http://example.blob.core.windows.net/disks/mydisk.vhd
+        name:
+            Specifies a name for the disk. Windows Azure uses the name to
+            identify the disk when creating virtual machines from the disk.
+        os: The OS type of the disk. Possible values are: Linux, Windows
+        '''
+        _validate_not_none('has_operating_system', has_operating_system)
+        _validate_not_none('label', label)
+        _validate_not_none('media_link', media_link)
+        _validate_not_none('name', name)
+        _validate_not_none('os', os)
+        return self._perform_post(self._get_disk_path(),
+                                  _XmlSerializer.disk_to_xml(
+                                      has_operating_system,
+                                      label,
+                                      media_link,
+                                      name,
+                                      os))
+
+    def update_disk(self, disk_name, has_operating_system, label, media_link,
+                    name, os):
+        '''
+        Updates an existing disk in your image repository.
+
+        disk_name: The name of the disk to update.
+        has_operating_system:
+            Specifies whether the disk contains an operating system. Only a
+            disk with an operating system installed can be mounted as OS Drive.
+        label: Specifies the description of the disk.
+        media_link:
+            Specifies the location of the blob in Windows Azure blob store
+            where the media for the disk is located. The blob location must
+            belong to the storage account in the current subscription specified
+            by the <subscription-id> value in the operation call. Example:
+            http://example.blob.core.windows.net/disks/mydisk.vhd
+        name:
+            Specifies a name for the disk. Windows Azure uses the name to
+            identify the disk when creating virtual machines from the disk.
+        os: The OS type of the disk. Possible values are: Linux, Windows
+        '''
+        _validate_not_none('disk_name', disk_name)
+        _validate_not_none('has_operating_system', has_operating_system)
+        _validate_not_none('label', label)
+        _validate_not_none('media_link', media_link)
+        _validate_not_none('name', name)
+        _validate_not_none('os', os)
+        return self._perform_put(self._get_disk_path(disk_name),
+                                 _XmlSerializer.disk_to_xml(
+                                     has_operating_system,
+                                     label,
+                                     media_link,
+                                     name,
+                                     os))
+
+    def delete_disk(self, disk_name, delete_vhd=False):
+        '''
+        Deletes the specified data or operating system disk from your image
+        repository.
+
+        disk_name: The name of the disk to delete.
+        delete_vhd: Deletes the underlying vhd blob in Azure storage.
+        '''
+        _validate_not_none('disk_name', disk_name)
+        path = self._get_disk_path(disk_name)
+        if delete_vhd:
+            path += '?comp=media'
+        return self._perform_delete(path)
+
+    #--Operations for virtual networks ------------------------------
+    def list_virtual_network_sites(self):
+        '''
+        Retrieves a list of the virtual networks.
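+
+        Example (illustrative sketch only; ``sms`` is assumed to be an
+        authenticated service management client instance and iteration
+        over the returned collection is assumed to yield site objects):
+            for site in sms.list_virtual_network_sites():
+                print(site.name)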
+ ''' + return self._perform_get(self._get_virtual_network_site_path(), VirtualNetworkSites) + + #--Helper functions -------------------------------------------------- + def _get_role_sizes_path(self): + return self._get_path('rolesizes', None) + + def _get_subscriptions_path(self): + return '/subscriptions' + + def _get_virtual_network_site_path(self): + return self._get_path('services/networking/virtualnetwork', None) + + def _get_storage_service_path(self, service_name=None): + return self._get_path('services/storageservices', service_name) + + def _get_hosted_service_path(self, service_name=None): + return self._get_path('services/hostedservices', service_name) + + def _get_deployment_path_using_slot(self, service_name, slot=None): + return self._get_path('services/hostedservices/' + _str(service_name) + + '/deploymentslots', slot) + + def _get_deployment_path_using_name(self, service_name, + deployment_name=None): + return self._get_path('services/hostedservices/' + _str(service_name) + + '/deployments', deployment_name) + + def _get_role_path(self, service_name, deployment_name, role_name=None): + return self._get_path('services/hostedservices/' + _str(service_name) + + '/deployments/' + deployment_name + + '/roles', role_name) + + def _get_role_instance_operations_path(self, service_name, deployment_name, + role_name=None): + return self._get_path('services/hostedservices/' + _str(service_name) + + '/deployments/' + deployment_name + + '/roleinstances', role_name) + '/Operations' + + def _get_roles_operations_path(self, service_name, deployment_name): + return self._get_path('services/hostedservices/' + _str(service_name) + + '/deployments/' + deployment_name + + '/roles/Operations', None) + + def _get_resource_extensions_path(self): + return self._get_path('services/resourceextensions', None) + + def _get_resource_extension_versions_path(self, publisher_name, extension_name): + return self._get_path('services/resourceextensions', + publisher_name + '/' + extension_name) + + def _get_dns_server_path(self, service_name, deployment_name, + dns_server_name=None): + return self._get_path('services/hostedservices/' + _str(service_name) + + '/deployments/' + deployment_name + + '/dnsservers', dns_server_name) + + def _get_capture_vm_image_path(self, service_name, deployment_name, role_name): + return self._get_path('services/hostedservices/' + _str(service_name) + + '/deployments/' + _str(deployment_name) + + '/roleinstances/' + _str(role_name) + '/Operations', + None) + + def _get_vm_image_path(self, image_name=None): + return self._get_path('services/vmimages', image_name) + + def _get_reserved_ip_path(self, name=None): + return self._get_path('services/networking/reservedips', name) + + def _get_data_disk_path(self, service_name, deployment_name, role_name, + lun=None): + return self._get_path('services/hostedservices/' + _str(service_name) + + '/deployments/' + _str(deployment_name) + + '/roles/' + _str(role_name) + '/DataDisks', lun) + + def _get_disk_path(self, disk_name=None): + return self._get_path('services/disks', disk_name) + + def _get_image_path(self, image_name=None): + return self._get_path('services/images', image_name) diff --git a/awx/lib/site-packages/azure/servicemanagement/sqldatabasemanagementservice.py b/awx/lib/site-packages/azure/servicemanagement/sqldatabasemanagementservice.py new file mode 100644 index 0000000000..741b60f8b4 --- /dev/null +++ b/awx/lib/site-packages/azure/servicemanagement/sqldatabasemanagementservice.py @@ -0,0 +1,390 @@ 
+#-------------------------------------------------------------------------
+# Copyright (c) Microsoft. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#--------------------------------------------------------------------------
+from azure import (
+    MANAGEMENT_HOST,
+    _parse_service_resources_response,
+    _validate_not_none,
+    )
+from azure.servicemanagement import (
+    EventLog,
+    ServerQuota,
+    Servers,
+    ServiceObjective,
+    Database,
+    FirewallRule,
+    _SqlManagementXmlSerializer,
+    )
+from azure.servicemanagement.servicemanagementclient import (
+    _ServiceManagementClient,
+    )
+
+class SqlDatabaseManagementService(_ServiceManagementClient):
+    ''' Note that this class is preliminary work on SQL Database
+        management. Since it lacks a number of features, the final
+        version may differ slightly from the current one.
+    '''
+
+    def __init__(self, subscription_id=None, cert_file=None,
+                 host=MANAGEMENT_HOST, request_session=None):
+        '''
+        Initializes the SQL database management service.
+
+        subscription_id: Subscription to manage.
+        cert_file:
+            Path to .pem certificate file (httplib), or location of the
+            certificate in your Personal certificate store (winhttp) in the
+            CURRENT_USER\my\CertificateName format.
+            If a request_session is specified, then this is unused.
+        host: Live ServiceClient URL. Defaults to Azure public cloud.
+        request_session:
+            Session object to use for http requests. If this is specified, it
+            replaces the default use of httplib or winhttp. Also, the cert_file
+            parameter is unused when a session is passed in.
+            The session object handles authentication, and as such can support
+            multiple types of authentication: .pem certificate, oauth.
+            For example, you can pass in a Session instance from the requests
+            library. To use .pem certificate authentication with requests
+            library, set the path to the .pem file on the session.cert
+            attribute.
+        '''
+        super(SqlDatabaseManagementService, self).__init__(
+            subscription_id, cert_file, host, request_session)
+        self.content_type = 'application/xml'
+
+    #--Operations for sql servers ----------------------------------------
+    def create_server(self, admin_login, admin_password, location):
+        '''
+        Creates a new Azure SQL Database server.
+
+        admin_login: The administrator login name for the new server.
+        admin_password: The administrator login password for the new server.
+        location: The region to deploy the new server.
+        '''
+        _validate_not_none('admin_login', admin_login)
+        _validate_not_none('admin_password', admin_password)
+        _validate_not_none('location', location)
+        response = self.perform_post(
+            self._get_servers_path(),
+            _SqlManagementXmlSerializer.create_server_to_xml(
+                admin_login,
+                admin_password,
+                location
+            )
+        )
+
+        return _SqlManagementXmlSerializer.xml_to_create_server_response(
+            response.body)
+
+    def set_server_admin_password(self, server_name, admin_password):
+        '''
+        Resets the administrator password for a server.
+
+        server_name: Name of the server on which to change the password.
+ admin_password: The new administrator password for the server. + ''' + _validate_not_none('server_name', server_name) + _validate_not_none('admin_password', admin_password) + return self._perform_post( + self._get_servers_path(server_name) + '?op=ResetPassword', + _SqlManagementXmlSerializer.set_server_admin_password_to_xml( + admin_password + ) + ) + + def delete_server(self, server_name): + ''' + Deletes an Azure SQL Database server (including all its databases). + + server_name: Name of the server you want to delete. + ''' + _validate_not_none('server_name', server_name) + return self._perform_delete( + self._get_servers_path(server_name)) + + def list_servers(self): + ''' + List the SQL servers defined on the account. + ''' + return self._perform_get(self._get_servers_path(), + Servers) + + def list_quotas(self, server_name): + ''' + Gets quotas for an Azure SQL Database Server. + + server_name: Name of the server. + ''' + _validate_not_none('server_name', server_name) + response = self._perform_get(self._get_quotas_path(server_name), + None) + return _parse_service_resources_response(response, ServerQuota) + + def get_server_event_logs(self, server_name, start_date, + interval_size_in_minutes, event_types=''): + ''' + Gets the event logs for an Azure SQL Database Server. + + server_name: Name of the server to retrieve the event logs from. + start_date: + The starting date and time of the events to retrieve in UTC format, + for example '2011-09-28 16:05:00'. + interval_size_in_minutes: + Size of the event logs to retrieve (in minutes). + Valid values are: 5, 60, or 1440. + event_types: + The event type of the log entries you want to retrieve. + Valid values are: + - connection_successful + - connection_failed + - connection_terminated + - deadlock + - throttling + - throttling_long_transaction + To return all event types pass in an empty string. + ''' + _validate_not_none('server_name', server_name) + _validate_not_none('start_date', start_date) + _validate_not_none('interval_size_in_minutes', interval_size_in_minutes) + _validate_not_none('event_types', event_types) + path = self._get_server_event_logs_path(server_name) + \ + '?startDate={0}&intervalSizeInMinutes={1}&eventTypes={2}'.format( + start_date, interval_size_in_minutes, event_types) + response = self._perform_get(path, None) + return _parse_service_resources_response(response, EventLog) + + #--Operations for firewall rules ------------------------------------------ + def create_firewall_rule(self, server_name, name, start_ip_address, + end_ip_address): + ''' + Creates an Azure SQL Database server firewall rule. + + server_name: Name of the server to set the firewall rule on. + name: The name of the new firewall rule. + start_ip_address: + The lowest IP address in the range of the server-level firewall + setting. IP addresses equal to or greater than this can attempt to + connect to the server. The lowest possible IP address is 0.0.0.0. + end_ip_address: + The highest IP address in the range of the server-level firewall + setting. IP addresses equal to or less than this can attempt to + connect to the server. The highest possible IP address is + 255.255.255.255. 
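+
+        Example (illustrative sketch only; ``sqlms`` stands for an
+        authenticated SqlDatabaseManagementService instance and the rule
+        name and address range are placeholders):
+            sqlms.create_firewall_rule('myserver', 'office-rule',
+                                       '10.0.0.1', '10.0.0.255')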
+ ''' + _validate_not_none('server_name', server_name) + _validate_not_none('name', name) + _validate_not_none('start_ip_address', start_ip_address) + _validate_not_none('end_ip_address', end_ip_address) + return self._perform_post( + self._get_firewall_rules_path(server_name), + _SqlManagementXmlSerializer.create_firewall_rule_to_xml( + name, start_ip_address, end_ip_address + ) + ) + + def update_firewall_rule(self, server_name, name, start_ip_address, + end_ip_address): + ''' + Update a firewall rule for an Azure SQL Database server. + + server_name: Name of the server to set the firewall rule on. + name: The name of the firewall rule to update. + start_ip_address: + The lowest IP address in the range of the server-level firewall + setting. IP addresses equal to or greater than this can attempt to + connect to the server. The lowest possible IP address is 0.0.0.0. + end_ip_address: + The highest IP address in the range of the server-level firewall + setting. IP addresses equal to or less than this can attempt to + connect to the server. The highest possible IP address is + 255.255.255.255. + ''' + _validate_not_none('server_name', server_name) + _validate_not_none('name', name) + _validate_not_none('start_ip_address', start_ip_address) + _validate_not_none('end_ip_address', end_ip_address) + return self._perform_put( + self._get_firewall_rules_path(server_name, name), + _SqlManagementXmlSerializer.update_firewall_rule_to_xml( + name, start_ip_address, end_ip_address + ) + ) + + def delete_firewall_rule(self, server_name, name): + ''' + Deletes an Azure SQL Database server firewall rule. + + server_name: + Name of the server with the firewall rule you want to delete. + name: + Name of the firewall rule you want to delete. + ''' + _validate_not_none('server_name', server_name) + _validate_not_none('name', name) + return self._perform_delete( + self._get_firewall_rules_path(server_name, name)) + + def list_firewall_rules(self, server_name): + ''' + Retrieves the set of firewall rules for an Azure SQL Database Server. + + server_name: Name of the server. + ''' + _validate_not_none('server_name', server_name) + response = self._perform_get(self._get_firewall_rules_path(server_name), + None) + return _parse_service_resources_response(response, FirewallRule) + + def list_service_level_objectives(self, server_name): + ''' + Gets the service level objectives for an Azure SQL Database server. + + server_name: Name of the server. + ''' + _validate_not_none('server_name', server_name) + response = self._perform_get( + self._get_service_objectives_path(server_name), None) + return _parse_service_resources_response(response, ServiceObjective) + + #--Operations for sql databases ---------------------------------------- + def create_database(self, server_name, name, service_objective_id, + edition=None, collation_name=None, + max_size_bytes=None): + ''' + Creates a new Azure SQL Database. + + server_name: Name of the server to contain the new database. + name: + Required. The name for the new database. See Naming Requirements + in Azure SQL Database General Guidelines and Limitations and + Database Identifiers for more information. + service_objective_id: + Required. The GUID corresponding to the performance level for + Edition. See List Service Level Objectives for current values. + edition: + Optional. The Service Tier (Edition) for the new database. If + omitted, the default is Web. Valid values are Web, Business, + Basic, Standard, and Premium. 
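Continuing the hypothetical sketch from above, the firewall-rule operations take a rule name plus an inclusive IP range; the server name, rule name, and addresses here are placeholders.

# Hypothetical continuation of the earlier sketch; sqlms is the
# SqlDatabaseManagementService instance created above.
sqlms.create_firewall_rule('myserver01', 'office', '1.2.3.0', '1.2.3.255')
rules = sqlms.list_firewall_rules('myserver01')   # parsed FirewallRule resources
sqlms.update_firewall_rule('myserver01', 'office', '1.2.3.0', '1.2.4.255')
sqlms.delete_firewall_rule('myserver01', 'office')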
See Azure SQL Database Service Tiers
+            (Editions) and Web and Business Edition Sunset FAQ for more
+            information.
+        collation_name:
+            Optional. The database collation. This can be any collation
+            supported by SQL. If omitted, the default collation is used. See
+            SQL Server Collation Support in Azure SQL Database General
+            Guidelines and Limitations for more information.
+        max_size_bytes:
+            Optional. Sets the maximum size, in bytes, for the database. This
+            value must be within the range of allowed values for Edition. If
+            omitted, the default value for the edition is used. See Azure SQL
+            Database Service Tiers (Editions) for current maximum database
+            sizes. Convert MB or GB values to bytes.
+            1 MB = 1048576 bytes. 1 GB = 1073741824 bytes.
+        '''
+        _validate_not_none('server_name', server_name)
+        _validate_not_none('name', name)
+        _validate_not_none('service_objective_id', service_objective_id)
+        return self._perform_post(
+            self._get_databases_path(server_name),
+            _SqlManagementXmlSerializer.create_database_to_xml(
+                name, service_objective_id, edition, collation_name,
+                max_size_bytes
+            )
+        )
+
+    def update_database(self, server_name, name, new_database_name=None,
+                        service_objective_id=None, edition=None,
+                        max_size_bytes=None):
+        '''
+        Updates existing database details.
+
+        server_name: Name of the server where the database is located.
+        name:
+            Required. The name of the database to update.
+        new_database_name:
+            Optional. The new name for the database. See Naming Requirements
+            in Azure SQL Database General Guidelines and Limitations and
+            Database Identifiers for more information.
+        service_objective_id:
+            Optional. The new service level to apply to the database. For more
+            information about service levels, see Azure SQL Database Service
+            Tiers and Performance Levels. Use List Service Level Objectives to
+            get the correct ID for the desired service objective.
+        edition:
+            Optional. The new edition for the database.
+        max_size_bytes:
+            Optional. The new size of the database in bytes. For information on
+            available sizes for each edition, see Azure SQL Database Service
+            Tiers (Editions).
+        '''
+        _validate_not_none('server_name', server_name)
+        _validate_not_none('name', name)
+        return self._perform_put(
+            self._get_databases_path(server_name, name),
+            _SqlManagementXmlSerializer.update_database_to_xml(
+                new_database_name, service_objective_id, edition,
+                max_size_bytes
+            )
+        )
+
+    def delete_database(self, server_name, name):
+        '''
+        Deletes an Azure SQL Database.
+
+        server_name: Name of the server where the database is located.
+        name: Name of the database to delete.
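A hedged sketch of the database operations, continuing the example above. The service objective GUID is a placeholder; real values come from list_service_level_objectives for the target server.

# Hypothetical continuation; the GUID below is a placeholder only.
objectives = sqlms.list_service_level_objectives('myserver01')
sqlms.create_database(
    'myserver01', 'mydb',
    '00000000-0000-0000-0000-000000000000',   # placeholder objective id
    edition='Web',
    max_size_bytes=1073741824)                # 1 GB
sqlms.update_database('myserver01', 'mydb', new_database_name='mydb-renamed')
sqlms.delete_database('myserver01', 'mydb-renamed')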
+        '''
+        return self._perform_delete(
+            self._get_databases_path(server_name, name))
+
+    def list_databases(self, name):
+        '''
+        List the SQL databases defined on the specified server.
+        '''
+        response = self._perform_get(self._get_list_databases_path(name),
+                                     None)
+        return _parse_service_resources_response(response, Database)
+
+
+    #--Helper functions --------------------------------------------------
+    def _get_servers_path(self, server_name=None):
+        return self._get_path('services/sqlservers/servers', server_name)
+
+    def _get_firewall_rules_path(self, server_name, name=None):
+        path = self._get_servers_path(server_name) + '/firewallrules'
+        if name:
+            path = path + '/' + name
+        return path
+
+    def _get_databases_path(self, server_name, name=None):
+        path = self._get_servers_path(server_name) + '/databases'
+        if name:
+            path = path + '/' + name
+        return path
+
+    def _get_server_event_logs_path(self, server_name):
+        return self._get_servers_path(server_name) + '/events'
+
+    def _get_service_objectives_path(self, server_name):
+        return self._get_servers_path(server_name) + '/serviceobjectives'
+
+    def _get_quotas_path(self, server_name, name=None):
+        path = self._get_servers_path(server_name) + '/serverquotas'
+        if name:
+            path = path + '/' + name
+        return path
+
+    def _get_list_databases_path(self, name):
+        # *contentview=generic is mandatory*
+        return self._get_path('services/sqlservers/servers/',
+                              name) + '/databases?contentview=generic'
+
diff --git a/awx/lib/site-packages/azure/servicemanagement/websitemanagementservice.py b/awx/lib/site-packages/azure/servicemanagement/websitemanagementservice.py
new file mode 100644
index 0000000000..3faf30562b
--- /dev/null
+++ b/awx/lib/site-packages/azure/servicemanagement/websitemanagementservice.py
@@ -0,0 +1,256 @@
+#-------------------------------------------------------------------------
+# Copyright (c) Microsoft. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#--------------------------------------------------------------------------
+from azure import (
+    MANAGEMENT_HOST,
+    _str,
+    )
+from azure.servicemanagement import (
+    WebSpaces,
+    WebSpace,
+    Sites,
+    Site,
+    MetricResponses,
+    MetricDefinitions,
+    PublishData,
+    _XmlSerializer,
+    )
+from azure.servicemanagement.servicemanagementclient import (
+    _ServiceManagementClient,
+    )
+
+class WebsiteManagementService(_ServiceManagementClient):
+    ''' Note that this class is preliminary work on WebSite
+    management. Since it lacks a number of features, the final version
+    may differ slightly from the current one.
+    '''
+
+    def __init__(self, subscription_id=None, cert_file=None,
+                 host=MANAGEMENT_HOST, request_session=None):
+        '''
+        Initializes the website management service.
+
+        subscription_id: Subscription to manage.
+        cert_file:
+            Path to .pem certificate file (httplib), or location of the
+            certificate in your Personal certificate store (winhttp) in the
+            CURRENT_USER\my\CertificateName format.
+            If a request_session is specified, then this is unused.
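All of the path helpers above build on the base client's _get_path, which prefixes the subscription id. The following self-contained sketch approximates that composition; the subscription id is a placeholder and get_path is a simplified stand-in, not the library's actual method.

# Simplified stand-in for _ServiceManagementClient._get_path, to show how
# the helpers above compose request paths. 'my-subscription-id' is a placeholder.
def get_path(resource, name=None):
    path = '/my-subscription-id/' + resource
    if name is not None:
        path += '/' + name
    return path

servers_path = get_path('services/sqlservers/servers', 'myserver01')
firewall_path = servers_path + '/firewallrules' + '/office'
assert firewall_path == ('/my-subscription-id/services/sqlservers/servers'
                         '/myserver01/firewallrules/office')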
+        host: Live ServiceClient URL. Defaults to Azure public cloud.
+        request_session:
+            Session object to use for http requests. If this is specified, it
+            replaces the default use of httplib or winhttp. Also, the cert_file
+            parameter is unused when a session is passed in.
+            The session object handles authentication, and as such can support
+            multiple types of authentication: .pem certificate, oauth.
+            For example, you can pass in a Session instance from the requests
+            library. To use .pem certificate authentication with requests
+            library, set the path to the .pem file on the session.cert
+            attribute.
+        '''
+        super(WebsiteManagementService, self).__init__(
+            subscription_id, cert_file, host, request_session)
+
+    #--Operations for web sites ----------------------------------------
+    def list_webspaces(self):
+        '''
+        List the webspaces defined on the account.
+        '''
+        return self._perform_get(self._get_list_webspaces_path(),
+                                 WebSpaces)
+
+    def get_webspace(self, webspace_name):
+        '''
+        Get details of a specific webspace.
+
+        webspace_name: The name of the webspace.
+        '''
+        return self._perform_get(self._get_webspace_details_path(webspace_name),
+                                 WebSpace)
+
+    def list_sites(self, webspace_name):
+        '''
+        List the web sites defined on this webspace.
+
+        webspace_name: The name of the webspace.
+        '''
+        return self._perform_get(self._get_sites_path(webspace_name),
+                                 Sites)
+
+    def get_site(self, webspace_name, website_name):
+        '''
+        Get details of a specific web site on this webspace.
+
+        webspace_name: The name of the webspace.
+        website_name: The name of the website.
+        '''
+        return self._perform_get(self._get_sites_details_path(webspace_name,
+                                                              website_name),
+                                 Site)
+
+    def create_site(self, webspace_name, website_name, geo_region, host_names,
+                    plan='VirtualDedicatedPlan', compute_mode='Shared',
+                    server_farm=None, site_mode=None):
+        '''
+        Create a website.
+
+        webspace_name: The name of the webspace.
+        website_name: The name of the website.
+        geo_region:
+            The geographical region of the webspace that will be created.
+        host_names:
+            An array of fully qualified domain names for the website. Only one
+            hostname can be specified in the azurewebsites.net domain.
+            The hostname should match the name of the website. Custom domains
+            can only be specified for Shared or Standard websites.
+        plan:
+            This value must be 'VirtualDedicatedPlan'.
+        compute_mode:
+            This value should be 'Shared' for the Free or Paid Shared
+            offerings, or 'Dedicated' for the Standard offering. The default
+            value is 'Shared'. If you set it to 'Dedicated', you must specify
+            a value for the server_farm parameter.
+        server_farm:
+            The name of the Server Farm associated with this website. This is
+            a required value for Standard mode.
+        site_mode:
+            Can be None, 'Limited' or 'Basic'. This value is 'Limited' for the
+            Free offering, and 'Basic' for the Paid Shared offering. Standard
+            mode does not use the site_mode parameter; it uses the compute_mode
+            parameter.
+        '''
+        xml = _XmlSerializer.create_website_to_xml(
+            webspace_name, website_name, geo_region, plan, host_names,
+            compute_mode, server_farm, site_mode)
+        return self._perform_post(
+            self._get_sites_path(webspace_name),
+            xml,
+            Site)
+
+    def delete_site(self, webspace_name, website_name,
+                    delete_empty_server_farm=False, delete_metrics=False):
+        '''
+        Delete a website.
+
+        webspace_name: The name of the webspace.
+        website_name: The name of the website.
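A minimal, hypothetical usage sketch of the site operations above; the subscription id, certificate path, webspace name, and site name are all placeholders, and the import path assumes the module location added in this diff.

# Hypothetical usage sketch; all literal values below are placeholders.
from azure.servicemanagement.websitemanagementservice import (
    WebsiteManagementService,
)

wms = WebsiteManagementService('my-subscription-id', 'mycert.pem')
webspaces = wms.list_webspaces()
site = wms.create_site(
    'mywebspace', 'mysite', 'West US',
    ['mysite.azurewebsites.net'],                # must match the site name
    compute_mode='Shared', site_mode='Limited')  # the Free offering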
+        delete_empty_server_farm:
+            If the site being deleted is the last web site in a server farm,
+            you can delete the server farm by setting this to True.
+        delete_metrics:
+            To also delete the metrics for the site that you are deleting, you
+            can set this to True.
+        '''
+        path = self._get_sites_details_path(webspace_name, website_name)
+        query = ''
+        if delete_empty_server_farm:
+            query += '&deleteEmptyServerFarm=true'
+        if delete_metrics:
+            query += '&deleteMetrics=true'
+        if query:
+            path = path + '?' + query.lstrip('&')
+        return self._perform_delete(path)
+
+    def restart_site(self, webspace_name, website_name):
+        '''
+        Restart a web site.
+
+        webspace_name: The name of the webspace.
+        website_name: The name of the website.
+        '''
+        return self._perform_post(
+            self._get_restart_path(webspace_name, website_name),
+            '')
+
+    def get_historical_usage_metrics(self, webspace_name, website_name,
+                                     metrics=None, start_time=None,
+                                     end_time=None, time_grain=None):
+        '''
+        Get historical usage metrics.
+
+        webspace_name: The name of the webspace.
+        website_name: The name of the website.
+        metrics:
+            Optional. List of metric names. Otherwise, all metrics are
+            returned.
+        start_time:
+            Optional. An ISO8601 date. Otherwise, the current hour is used.
+        end_time:
+            Optional. An ISO8601 date. Otherwise, the current time is used.
+        time_grain:
+            Optional. A rollup name, such as P1D. Otherwise, the default
+            rollup for the metrics is used.
+        More information and metric names at:
+        http://msdn.microsoft.com/en-us/library/azure/dn166964.aspx
+        '''
+        metrics = ('names=' + ','.join(metrics)) if metrics else ''
+        start_time = ('StartTime=' + start_time) if start_time else ''
+        end_time = ('EndTime=' + end_time) if end_time else ''
+        time_grain = ('TimeGrain=' + time_grain) if time_grain else ''
+        parameters = '&'.join(
+            v for v in (metrics, start_time, end_time, time_grain) if v)
+        parameters = '?' + parameters if parameters else ''
+        return self._perform_get(
+            self._get_historical_usage_metrics_path(
+                webspace_name, website_name) + parameters,
+            MetricResponses)
+
+    def get_metric_definitions(self, webspace_name, website_name):
+        '''
+        Get metric definitions of the metrics available for this web site.
+
+        webspace_name: The name of the webspace.
+        website_name: The name of the website.
+        '''
+        return self._perform_get(
+            self._get_metric_definitions_path(webspace_name, website_name),
+            MetricDefinitions)
+
+    def get_publish_profile_xml(self, webspace_name, website_name):
+        '''
+        Get a site's publish profile as a string.
+
+        webspace_name: The name of the webspace.
+        website_name: The name of the website.
+        '''
+        return self._perform_get(
+            self._get_publishxml_path(webspace_name, website_name),
+            None).body.decode("utf-8")
+
+    def get_publish_profile(self, webspace_name, website_name):
+        '''
+        Get a site's publish profile as an object.
+
+        webspace_name: The name of the webspace.
+        website_name: The name of the website.
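The query string assembled by get_historical_usage_metrics is easy to trace in isolation. The standalone sketch below mirrors that parameter handling exactly; the function name and metric names are illustrative only.

# Standalone sketch of the query string built by
# get_historical_usage_metrics above; names here are illustrative.
def build_metrics_query(metrics=None, start_time=None, end_time=None,
                        time_grain=None):
    metrics = ('names=' + ','.join(metrics)) if metrics else ''
    start_time = ('StartTime=' + start_time) if start_time else ''
    end_time = ('EndTime=' + end_time) if end_time else ''
    time_grain = ('TimeGrain=' + time_grain) if time_grain else ''
    parameters = '&'.join(
        v for v in (metrics, start_time, end_time, time_grain) if v)
    return '?' + parameters if parameters else ''

print(build_metrics_query(metrics=['CpuTime', 'Requests'], time_grain='P1D'))
# -> ?names=CpuTime,Requests&TimeGrain=P1D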
+ ''' + return self._perform_get(self._get_publishxml_path(webspace_name, website_name), + PublishData) + + #--Helper functions -------------------------------------------------- + def _get_list_webspaces_path(self): + return self._get_path('services/webspaces', None) + + def _get_webspace_details_path(self, webspace_name): + return self._get_path('services/webspaces/', webspace_name) + + def _get_sites_path(self, webspace_name): + return self._get_path('services/webspaces/', + webspace_name) + '/sites' + + def _get_sites_details_path(self, webspace_name, website_name): + return self._get_path('services/webspaces/', + webspace_name) + '/sites/' + _str(website_name) + + def _get_restart_path(self, webspace_name, website_name): + return self._get_path('services/webspaces/', + webspace_name) + '/sites/' + _str(website_name) + '/restart/' + + def _get_historical_usage_metrics_path(self, webspace_name, website_name): + return self._get_path('services/webspaces/', + webspace_name) + '/sites/' + _str(website_name) + '/metrics/' + + def _get_metric_definitions_path(self, webspace_name, website_name): + return self._get_path('services/webspaces/', + webspace_name) + '/sites/' + _str(website_name) + '/metricdefinitions/' + + def _get_publishxml_path(self, webspace_name, website_name): + return self._get_path('services/webspaces/', + webspace_name) + '/sites/' + _str(website_name) + '/publishxml/' diff --git a/awx/lib/site-packages/azure/storage/__init__.py b/awx/lib/site-packages/azure/storage/__init__.py index 5a28afd0bb..6089d3ff2a 100644 --- a/awx/lib/site-packages/azure/storage/__init__.py +++ b/awx/lib/site-packages/azure/storage/__init__.py @@ -1,913 +1,901 @@ -#------------------------------------------------------------------------- -# Copyright (c) Microsoft. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#-------------------------------------------------------------------------- -import hashlib -import hmac -import sys -import types - -from datetime import datetime -from xml.dom import minidom -from azure import (WindowsAzureData, - WindowsAzureError, - METADATA_NS, - xml_escape, - _create_entry, - _decode_base64_to_text, - _decode_base64_to_bytes, - _encode_base64, - _fill_data_minidom, - _fill_instance_element, - _get_child_nodes, - _get_child_nodesNS, - _get_children_from_path, - _get_entry_properties, - _general_error_handler, - _list_of, - _parse_response_for_dict, - _unicode_type, - _ERROR_CANNOT_SERIALIZE_VALUE_TO_ENTITY, - ) - -# x-ms-version for storage service. -X_MS_VERSION = '2012-02-12' - - -class EnumResultsBase(object): - - ''' base class for EnumResults. ''' - - def __init__(self): - self.prefix = u'' - self.marker = u'' - self.max_results = 0 - self.next_marker = u'' - - -class ContainerEnumResults(EnumResultsBase): - - ''' Blob Container list. 
''' - - def __init__(self): - EnumResultsBase.__init__(self) - self.containers = _list_of(Container) - - def __iter__(self): - return iter(self.containers) - - def __len__(self): - return len(self.containers) - - def __getitem__(self, index): - return self.containers[index] - - -class Container(WindowsAzureData): - - ''' Blob container class. ''' - - def __init__(self): - self.name = u'' - self.url = u'' - self.properties = Properties() - self.metadata = {} - - -class Properties(WindowsAzureData): - - ''' Blob container's properties class. ''' - - def __init__(self): - self.last_modified = u'' - self.etag = u'' - - -class RetentionPolicy(WindowsAzureData): - - ''' RetentionPolicy in service properties. ''' - - def __init__(self): - self.enabled = False - self.__dict__['days'] = None - - def get_days(self): - # convert days to int value - return int(self.__dict__['days']) - - def set_days(self, value): - ''' set default days if days is set to empty. ''' - self.__dict__['days'] = value - - days = property(fget=get_days, fset=set_days) - - -class Logging(WindowsAzureData): - - ''' Logging class in service properties. ''' - - def __init__(self): - self.version = u'1.0' - self.delete = False - self.read = False - self.write = False - self.retention_policy = RetentionPolicy() - - -class Metrics(WindowsAzureData): - - ''' Metrics class in service properties. ''' - - def __init__(self): - self.version = u'1.0' - self.enabled = False - self.include_apis = None - self.retention_policy = RetentionPolicy() - - -class StorageServiceProperties(WindowsAzureData): - - ''' Storage Service Propeties class. ''' - - def __init__(self): - self.logging = Logging() - self.metrics = Metrics() - - -class AccessPolicy(WindowsAzureData): - - ''' Access Policy class in service properties. ''' - - def __init__(self, start=u'', expiry=u'', permission='u'): - self.start = start - self.expiry = expiry - self.permission = permission - - -class SignedIdentifier(WindowsAzureData): - - ''' Signed Identifier class for service properties. ''' - - def __init__(self): - self.id = u'' - self.access_policy = AccessPolicy() - - -class SignedIdentifiers(WindowsAzureData): - - ''' SignedIdentifier list. ''' - - def __init__(self): - self.signed_identifiers = _list_of(SignedIdentifier) - - def __iter__(self): - return iter(self.signed_identifiers) - - def __len__(self): - return len(self.signed_identifiers) - - def __getitem__(self, index): - return self.signed_identifiers[index] - - -class BlobEnumResults(EnumResultsBase): - - ''' Blob list.''' - - def __init__(self): - EnumResultsBase.__init__(self) - self.blobs = _list_of(Blob) - self.prefixes = _list_of(BlobPrefix) - self.delimiter = '' - - def __iter__(self): - return iter(self.blobs) - - def __len__(self): - return len(self.blobs) - - def __getitem__(self, index): - return self.blobs[index] - - -class BlobResult(bytes): - - def __new__(cls, blob, properties): - return bytes.__new__(cls, blob if blob else b'') - - def __init__(self, blob, properties): - self.properties = properties - - -class Blob(WindowsAzureData): - - ''' Blob class. 
''' - - def __init__(self): - self.name = u'' - self.snapshot = u'' - self.url = u'' - self.properties = BlobProperties() - self.metadata = {} - - -class BlobProperties(WindowsAzureData): - - ''' Blob Properties ''' - - def __init__(self): - self.last_modified = u'' - self.etag = u'' - self.content_length = 0 - self.content_type = u'' - self.content_encoding = u'' - self.content_language = u'' - self.content_md5 = u'' - self.xms_blob_sequence_number = 0 - self.blob_type = u'' - self.lease_status = u'' - self.lease_state = u'' - self.lease_duration = u'' - self.copy_id = u'' - self.copy_source = u'' - self.copy_status = u'' - self.copy_progress = u'' - self.copy_completion_time = u'' - self.copy_status_description = u'' - - -class BlobPrefix(WindowsAzureData): - - ''' BlobPrefix in Blob. ''' - - def __init__(self): - self.name = '' - - -class BlobBlock(WindowsAzureData): - - ''' BlobBlock class ''' - - def __init__(self, id=None, size=None): - self.id = id - self.size = size - - -class BlobBlockList(WindowsAzureData): - - ''' BlobBlockList class ''' - - def __init__(self): - self.committed_blocks = [] - self.uncommitted_blocks = [] - - -class PageRange(WindowsAzureData): - - ''' Page Range for page blob. ''' - - def __init__(self): - self.start = 0 - self.end = 0 - - -class PageList(object): - - ''' Page list for page blob. ''' - - def __init__(self): - self.page_ranges = _list_of(PageRange) - - def __iter__(self): - return iter(self.page_ranges) - - def __len__(self): - return len(self.page_ranges) - - def __getitem__(self, index): - return self.page_ranges[index] - - -class QueueEnumResults(EnumResultsBase): - - ''' Queue list''' - - def __init__(self): - EnumResultsBase.__init__(self) - self.queues = _list_of(Queue) - - def __iter__(self): - return iter(self.queues) - - def __len__(self): - return len(self.queues) - - def __getitem__(self, index): - return self.queues[index] - - -class Queue(WindowsAzureData): - - ''' Queue class ''' - - def __init__(self): - self.name = u'' - self.url = u'' - self.metadata = {} - - -class QueueMessagesList(WindowsAzureData): - - ''' Queue message list. ''' - - def __init__(self): - self.queue_messages = _list_of(QueueMessage) - - def __iter__(self): - return iter(self.queue_messages) - - def __len__(self): - return len(self.queue_messages) - - def __getitem__(self, index): - return self.queue_messages[index] - - -class QueueMessage(WindowsAzureData): - - ''' Queue message class. ''' - - def __init__(self): - self.message_id = u'' - self.insertion_time = u'' - self.expiration_time = u'' - self.pop_receipt = u'' - self.time_next_visible = u'' - self.dequeue_count = u'' - self.message_text = u'' - - -class Entity(WindowsAzureData): - - ''' Entity class. The attributes of entity will be created dynamically. ''' - pass - - -class EntityProperty(WindowsAzureData): - - ''' Entity property. contains type and value. ''' - - def __init__(self, type=None, value=None): - self.type = type - self.value = value - - -class Table(WindowsAzureData): - - ''' Only for intellicens and telling user the return type. 
''' - pass - - -def _parse_blob_enum_results_list(response): - respbody = response.body - return_obj = BlobEnumResults() - doc = minidom.parseString(respbody) - - for enum_results in _get_child_nodes(doc, 'EnumerationResults'): - for child in _get_children_from_path(enum_results, 'Blobs', 'Blob'): - return_obj.blobs.append(_fill_instance_element(child, Blob)) - - for child in _get_children_from_path(enum_results, - 'Blobs', - 'BlobPrefix'): - return_obj.prefixes.append( - _fill_instance_element(child, BlobPrefix)) - - for name, value in vars(return_obj).items(): - if name == 'blobs' or name == 'prefixes': - continue - value = _fill_data_minidom(enum_results, name, value) - if value is not None: - setattr(return_obj, name, value) - - return return_obj - - -def _update_storage_header(request): - ''' add additional headers for storage request. ''' - if request.body: - assert isinstance(request.body, bytes) - - # if it is PUT, POST, MERGE, DELETE, need to add content-lengt to header. - if request.method in ['PUT', 'POST', 'MERGE', 'DELETE']: - request.headers.append(('Content-Length', str(len(request.body)))) - - # append addtional headers base on the service - request.headers.append(('x-ms-version', X_MS_VERSION)) - - # append x-ms-meta name, values to header - for name, value in request.headers: - if 'x-ms-meta-name-values' in name and value: - for meta_name, meta_value in value.items(): - request.headers.append(('x-ms-meta-' + meta_name, meta_value)) - request.headers.remove((name, value)) - break - return request - - -def _update_storage_blob_header(request, account_name, account_key): - ''' add additional headers for storage blob request. ''' - - request = _update_storage_header(request) - current_time = datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT') - request.headers.append(('x-ms-date', current_time)) - request.headers.append( - ('Content-Type', 'application/octet-stream Charset=UTF-8')) - request.headers.append(('Authorization', - _sign_storage_blob_request(request, - account_name, - account_key))) - - return request.headers - - -def _update_storage_queue_header(request, account_name, account_key): - ''' add additional headers for storage queue request. ''' - return _update_storage_blob_header(request, account_name, account_key) - - -def _update_storage_table_header(request): - ''' add additional headers for storage table request. ''' - - request = _update_storage_header(request) - for name, _ in request.headers: - if name.lower() == 'content-type': - break - else: - request.headers.append(('Content-Type', 'application/atom+xml')) - request.headers.append(('DataServiceVersion', '2.0;NetFx')) - request.headers.append(('MaxDataServiceVersion', '2.0;NetFx')) - current_time = datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT') - request.headers.append(('x-ms-date', current_time)) - request.headers.append(('Date', current_time)) - return request.headers - - -def _sign_storage_blob_request(request, account_name, account_key): - ''' - Returns the signed string for blob request which is used to set - Authorization header. This is also used to sign queue request. 
- ''' - - uri_path = request.path.split('?')[0] - - # method to sign - string_to_sign = request.method + '\n' - - # get headers to sign - headers_to_sign = [ - 'content-encoding', 'content-language', 'content-length', - 'content-md5', 'content-type', 'date', 'if-modified-since', - 'if-match', 'if-none-match', 'if-unmodified-since', 'range'] - - request_header_dict = dict((name.lower(), value) - for name, value in request.headers if value) - string_to_sign += '\n'.join(request_header_dict.get(x, '') - for x in headers_to_sign) + '\n' - - # get x-ms header to sign - x_ms_headers = [] - for name, value in request.headers: - if 'x-ms' in name: - x_ms_headers.append((name.lower(), value)) - x_ms_headers.sort() - for name, value in x_ms_headers: - if value: - string_to_sign += ''.join([name, ':', value, '\n']) - - # get account_name and uri path to sign - string_to_sign += '/' + account_name + uri_path - - # get query string to sign if it is not table service - query_to_sign = request.query - query_to_sign.sort() - - current_name = '' - for name, value in query_to_sign: - if value: - if current_name != name: - string_to_sign += '\n' + name + ':' + value - else: - string_to_sign += '\n' + ',' + value - - # sign the request - auth_string = 'SharedKey ' + account_name + ':' + \ - _sign_string(account_key, string_to_sign) - return auth_string - - -def _sign_storage_table_request(request, account_name, account_key): - uri_path = request.path.split('?')[0] - - string_to_sign = request.method + '\n' - headers_to_sign = ['content-md5', 'content-type', 'date'] - request_header_dict = dict((name.lower(), value) - for name, value in request.headers if value) - string_to_sign += '\n'.join(request_header_dict.get(x, '') - for x in headers_to_sign) + '\n' - - # get account_name and uri path to sign - string_to_sign += ''.join(['/', account_name, uri_path]) - - for name, value in request.query: - if name == 'comp' and uri_path == '/': - string_to_sign += '?comp=' + value - break - - # sign the request - auth_string = 'SharedKey ' + account_name + ':' + \ - _sign_string(account_key, string_to_sign) - return auth_string - - -def _sign_string(account_key, string_to_sign): - decoded_account_key = _decode_base64_to_bytes(account_key) - if isinstance(string_to_sign, _unicode_type): - string_to_sign = string_to_sign.encode('utf-8') - signed_hmac_sha256 = hmac.HMAC( - decoded_account_key, string_to_sign, hashlib.sha256) - digest = signed_hmac_sha256.digest() - encoded_digest = _encode_base64(digest) - return encoded_digest - - -def _to_python_bool(value): - if value.lower() == 'true': - return True - return False - - -def _to_entity_int(data): - int_max = (2 << 30) - 1 - if data > (int_max) or data < (int_max + 1) * (-1): - return 'Edm.Int64', str(data) - else: - return 'Edm.Int32', str(data) - - -def _to_entity_bool(value): - if value: - return 'Edm.Boolean', 'true' - return 'Edm.Boolean', 'false' - - -def _to_entity_datetime(value): - return 'Edm.DateTime', value.strftime('%Y-%m-%dT%H:%M:%S') - - -def _to_entity_float(value): - return 'Edm.Double', str(value) - - -def _to_entity_property(value): - if value.type == 'Edm.Binary': - return value.type, _encode_base64(value.value) - - return value.type, str(value.value) - - -def _to_entity_none(value): - return None, None - - -def _to_entity_str(value): - return 'Edm.String', value - - -# Tables of conversions to and from entity types. We support specific -# datatypes, and beyond that the user can use an EntityProperty to get -# custom data type support. 
- -def _from_entity_binary(value): - return EntityProperty('Edm.Binary', _decode_base64_to_bytes(value)) - - -def _from_entity_int(value): - return int(value) - - -def _from_entity_datetime(value): - format = '%Y-%m-%dT%H:%M:%S' - if '.' in value: - format = format + '.%f' - if value.endswith('Z'): - format = format + 'Z' - return datetime.strptime(value, format) - -_ENTITY_TO_PYTHON_CONVERSIONS = { - 'Edm.Binary': _from_entity_binary, - 'Edm.Int32': _from_entity_int, - 'Edm.Int64': _from_entity_int, - 'Edm.Double': float, - 'Edm.Boolean': _to_python_bool, - 'Edm.DateTime': _from_entity_datetime, -} - -# Conversion from Python type to a function which returns a tuple of the -# type string and content string. -_PYTHON_TO_ENTITY_CONVERSIONS = { - int: _to_entity_int, - bool: _to_entity_bool, - datetime: _to_entity_datetime, - float: _to_entity_float, - EntityProperty: _to_entity_property, - str: _to_entity_str, -} - -if sys.version_info < (3,): - _PYTHON_TO_ENTITY_CONVERSIONS.update({ - long: _to_entity_int, - types.NoneType: _to_entity_none, - unicode: _to_entity_str, - }) - - -def _convert_entity_to_xml(source): - ''' Converts an entity object to xml to send. - - The entity format is: - <entry xmlns:d="http://schemas.microsoft.com/ado/2007/08/dataservices" xmlns:m="http://schemas.microsoft.com/ado/2007/08/dataservices/metadata" xmlns="http://www.w3.org/2005/Atom"> - <title /> - <updated>2008-09-18T23:46:19.3857256Z</updated> - <author> - <name /> - </author> - <id /> - <content type="application/xml"> - <m:properties> - <d:Address>Mountain View</d:Address> - <d:Age m:type="Edm.Int32">23</d:Age> - <d:AmountDue m:type="Edm.Double">200.23</d:AmountDue> - <d:BinaryData m:type="Edm.Binary" m:null="true" /> - <d:CustomerCode m:type="Edm.Guid">c9da6455-213d-42c9-9a79-3e9149a57833</d:CustomerCode> - <d:CustomerSince m:type="Edm.DateTime">2008-07-10T00:00:00</d:CustomerSince> - <d:IsActive m:type="Edm.Boolean">true</d:IsActive> - <d:NumOfOrders m:type="Edm.Int64">255</d:NumOfOrders> - <d:PartitionKey>mypartitionkey</d:PartitionKey> - <d:RowKey>myrowkey1</d:RowKey> - <d:Timestamp m:type="Edm.DateTime">0001-01-01T00:00:00</d:Timestamp> - </m:properties> - </content> - </entry> - ''' - - # construct the entity body included in <m:properties> and </m:properties> - entity_body = '<m:properties xml:space="preserve">{properties}</m:properties>' - - if isinstance(source, WindowsAzureData): - source = vars(source) - - properties_str = '' - - # set properties type for types we know if value has no type info. 
- # if value has type info, then set the type to value.type - for name, value in source.items(): - mtype = '' - conv = _PYTHON_TO_ENTITY_CONVERSIONS.get(type(value)) - if conv is None and sys.version_info >= (3,) and value is None: - conv = _to_entity_none - if conv is None: - raise WindowsAzureError( - _ERROR_CANNOT_SERIALIZE_VALUE_TO_ENTITY.format( - type(value).__name__)) - - mtype, value = conv(value) - - # form the property node - properties_str += ''.join(['<d:', name]) - if value is None: - properties_str += ' m:null="true" />' - else: - if mtype: - properties_str += ''.join([' m:type="', mtype, '"']) - properties_str += ''.join(['>', - xml_escape(value), '</d:', name, '>']) - - if sys.version_info < (3,): - if isinstance(properties_str, unicode): - properties_str = properties_str.encode(encoding='utf-8') - - # generate the entity_body - entity_body = entity_body.format(properties=properties_str) - xmlstr = _create_entry(entity_body) - return xmlstr - - -def _convert_table_to_xml(table_name): - ''' - Create xml to send for a given table name. Since xml format for table is - the same as entity and the only difference is that table has only one - property 'TableName', so we just call _convert_entity_to_xml. - - table_name: the name of the table - ''' - return _convert_entity_to_xml({'TableName': table_name}) - - -def _convert_block_list_to_xml(block_id_list): - ''' - Convert a block list to xml to send. - - block_id_list: - a str list containing the block ids that are used in put_block_list. - Only get block from latest blocks. - ''' - if block_id_list is None: - return '' - xml = '<?xml version="1.0" encoding="utf-8"?><BlockList>' - for value in block_id_list: - xml += '<Latest>{0}</Latest>'.format(_encode_base64(value)) - - return xml + '</BlockList>' - - -def _create_blob_result(response): - blob_properties = _parse_response_for_dict(response) - return BlobResult(response.body, blob_properties) - - -def _convert_response_to_block_list(response): - ''' - Converts xml response to block list class. - ''' - blob_block_list = BlobBlockList() - - xmldoc = minidom.parseString(response.body) - for xml_block in _get_children_from_path(xmldoc, - 'BlockList', - 'CommittedBlocks', - 'Block'): - xml_block_id = _decode_base64_to_text( - _get_child_nodes(xml_block, 'Name')[0].firstChild.nodeValue) - xml_block_size = int( - _get_child_nodes(xml_block, 'Size')[0].firstChild.nodeValue) - blob_block_list.committed_blocks.append( - BlobBlock(xml_block_id, xml_block_size)) - - for xml_block in _get_children_from_path(xmldoc, - 'BlockList', - 'UncommittedBlocks', - 'Block'): - xml_block_id = _decode_base64_to_text( - _get_child_nodes(xml_block, 'Name')[0].firstChild.nodeValue) - xml_block_size = int( - _get_child_nodes(xml_block, 'Size')[0].firstChild.nodeValue) - blob_block_list.uncommitted_blocks.append( - BlobBlock(xml_block_id, xml_block_size)) - - return blob_block_list - - -def _remove_prefix(name): - colon = name.find(':') - if colon != -1: - return name[colon + 1:] - return name - - -def _convert_response_to_entity(response): - if response is None: - return response - return _convert_xml_to_entity(response.body) - - -def _convert_xml_to_entity(xmlstr): - ''' Convert xml response to entity. 
- - The format of entity: - <entry xmlns:d="http://schemas.microsoft.com/ado/2007/08/dataservices" xmlns:m="http://schemas.microsoft.com/ado/2007/08/dataservices/metadata" xmlns="http://www.w3.org/2005/Atom"> - <title /> - <updated>2008-09-18T23:46:19.3857256Z</updated> - <author> - <name /> - </author> - <id /> - <content type="application/xml"> - <m:properties> - <d:Address>Mountain View</d:Address> - <d:Age m:type="Edm.Int32">23</d:Age> - <d:AmountDue m:type="Edm.Double">200.23</d:AmountDue> - <d:BinaryData m:type="Edm.Binary" m:null="true" /> - <d:CustomerCode m:type="Edm.Guid">c9da6455-213d-42c9-9a79-3e9149a57833</d:CustomerCode> - <d:CustomerSince m:type="Edm.DateTime">2008-07-10T00:00:00</d:CustomerSince> - <d:IsActive m:type="Edm.Boolean">true</d:IsActive> - <d:NumOfOrders m:type="Edm.Int64">255</d:NumOfOrders> - <d:PartitionKey>mypartitionkey</d:PartitionKey> - <d:RowKey>myrowkey1</d:RowKey> - <d:Timestamp m:type="Edm.DateTime">0001-01-01T00:00:00</d:Timestamp> - </m:properties> - </content> - </entry> - ''' - xmldoc = minidom.parseString(xmlstr) - - xml_properties = None - for entry in _get_child_nodes(xmldoc, 'entry'): - for content in _get_child_nodes(entry, 'content'): - # TODO: Namespace - xml_properties = _get_child_nodesNS( - content, METADATA_NS, 'properties') - - if not xml_properties: - return None - - entity = Entity() - # extract each property node and get the type from attribute and node value - for xml_property in xml_properties[0].childNodes: - name = _remove_prefix(xml_property.nodeName) - # exclude the Timestamp since it is auto added by azure when - # inserting entity. We don't want this to mix with real properties - if name in ['Timestamp']: - continue - - if xml_property.firstChild: - value = xml_property.firstChild.nodeValue - else: - value = '' - - isnull = xml_property.getAttributeNS(METADATA_NS, 'null') - mtype = xml_property.getAttributeNS(METADATA_NS, 'type') - - # if not isnull and no type info, then it is a string and we just - # need the str type to hold the property. - if not isnull and not mtype: - _set_entity_attr(entity, name, value) - elif isnull == 'true': - if mtype: - property = EntityProperty(mtype, None) - else: - property = EntityProperty('Edm.String', None) - else: # need an object to hold the property - conv = _ENTITY_TO_PYTHON_CONVERSIONS.get(mtype) - if conv is not None: - property = conv(value) - else: - property = EntityProperty(mtype, value) - _set_entity_attr(entity, name, property) - - # extract id, updated and name value from feed entry and set them of - # rule. - for name, value in _get_entry_properties(xmlstr, True).items(): - if name in ['etag']: - _set_entity_attr(entity, name, value) - - return entity - - -def _set_entity_attr(entity, name, value): - try: - setattr(entity, name, value) - except UnicodeEncodeError: - # Python 2 doesn't support unicode attribute names, so we'll - # add them and access them directly through the dictionary - entity.__dict__[name] = value - - -def _convert_xml_to_table(xmlstr): - ''' Converts the xml response to table class. - Simply call convert_xml_to_entity and extract the table name, and add - updated and author info - ''' - table = Table() - entity = _convert_xml_to_entity(xmlstr) - setattr(table, 'name', entity.TableName) - for name, value in _get_entry_properties(xmlstr, False).items(): - setattr(table, name, value) - return table - - -def _storage_error_handler(http_error): - ''' Simple error handler for storage service. 
''' - return _general_error_handler(http_error) - -# make these available just from storage. -from azure.storage.blobservice import BlobService -from azure.storage.queueservice import QueueService -from azure.storage.tableservice import TableService -from azure.storage.cloudstorageaccount import CloudStorageAccount -from azure.storage.sharedaccesssignature import ( - SharedAccessSignature, - SharedAccessPolicy, - Permission, - WebResource, - ) +#------------------------------------------------------------------------- +# Copyright (c) Microsoft. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#-------------------------------------------------------------------------- +import sys +import types + +from datetime import datetime +from dateutil import parser +from dateutil.tz import tzutc +from xml.dom import minidom +from azure import (WindowsAzureData, + WindowsAzureError, + METADATA_NS, + xml_escape, + _create_entry, + _decode_base64_to_text, + _decode_base64_to_bytes, + _encode_base64, + _fill_data_minidom, + _fill_instance_element, + _get_child_nodes, + _get_child_nodesNS, + _get_children_from_path, + _get_entry_properties, + _general_error_handler, + _list_of, + _parse_response_for_dict, + _sign_string, + _unicode_type, + _ERROR_CANNOT_SERIALIZE_VALUE_TO_ENTITY, + ) + +# x-ms-version for storage service. +X_MS_VERSION = '2012-02-12' + + +class EnumResultsBase(object): + + ''' base class for EnumResults. ''' + + def __init__(self): + self.prefix = u'' + self.marker = u'' + self.max_results = 0 + self.next_marker = u'' + + +class ContainerEnumResults(EnumResultsBase): + + ''' Blob Container list. ''' + + def __init__(self): + EnumResultsBase.__init__(self) + self.containers = _list_of(Container) + + def __iter__(self): + return iter(self.containers) + + def __len__(self): + return len(self.containers) + + def __getitem__(self, index): + return self.containers[index] + + +class Container(WindowsAzureData): + + ''' Blob container class. ''' + + def __init__(self): + self.name = u'' + self.url = u'' + self.properties = Properties() + self.metadata = {} + + +class Properties(WindowsAzureData): + + ''' Blob container's properties class. ''' + + def __init__(self): + self.last_modified = u'' + self.etag = u'' + + +class RetentionPolicy(WindowsAzureData): + + ''' RetentionPolicy in service properties. ''' + + def __init__(self): + self.enabled = False + self.__dict__['days'] = None + + def get_days(self): + # convert days to int value + return int(self.__dict__['days']) + + def set_days(self, value): + ''' set default days if days is set to empty. ''' + self.__dict__['days'] = value + + days = property(fget=get_days, fset=set_days) + + +class Logging(WindowsAzureData): + + ''' Logging class in service properties. ''' + + def __init__(self): + self.version = u'1.0' + self.delete = False + self.read = False + self.write = False + self.retention_policy = RetentionPolicy() + + +class Metrics(WindowsAzureData): + + ''' Metrics class in service properties. 
'''
+
+    def __init__(self):
+        self.version = u'1.0'
+        self.enabled = False
+        self.include_apis = None
+        self.retention_policy = RetentionPolicy()
+
+
+class StorageServiceProperties(WindowsAzureData):
+
+    ''' Storage Service Properties class. '''
+
+    def __init__(self):
+        self.logging = Logging()
+        self.metrics = Metrics()
+
+
+class AccessPolicy(WindowsAzureData):
+
+    ''' Access Policy class in service properties. '''
+
+    def __init__(self, start=u'', expiry=u'', permission=u''):
+        self.start = start
+        self.expiry = expiry
+        self.permission = permission
+
+
+class SignedIdentifier(WindowsAzureData):
+
+    ''' Signed Identifier class for service properties. '''
+
+    def __init__(self):
+        self.id = u''
+        self.access_policy = AccessPolicy()
+
+
+class SignedIdentifiers(WindowsAzureData):
+
+    ''' SignedIdentifier list. '''
+
+    def __init__(self):
+        self.signed_identifiers = _list_of(SignedIdentifier)
+
+    def __iter__(self):
+        return iter(self.signed_identifiers)
+
+    def __len__(self):
+        return len(self.signed_identifiers)
+
+    def __getitem__(self, index):
+        return self.signed_identifiers[index]
+
+
+class BlobEnumResults(EnumResultsBase):
+
+    ''' Blob list.'''
+
+    def __init__(self):
+        EnumResultsBase.__init__(self)
+        self.blobs = _list_of(Blob)
+        self.prefixes = _list_of(BlobPrefix)
+        self.delimiter = ''
+
+    def __iter__(self):
+        return iter(self.blobs)
+
+    def __len__(self):
+        return len(self.blobs)
+
+    def __getitem__(self, index):
+        return self.blobs[index]
+
+
+class BlobResult(bytes):
+
+    def __new__(cls, blob, properties):
+        return bytes.__new__(cls, blob if blob else b'')
+
+    def __init__(self, blob, properties):
+        self.properties = properties
+
+
+class Blob(WindowsAzureData):
+
+    ''' Blob class. '''
+
+    def __init__(self):
+        self.name = u''
+        self.snapshot = u''
+        self.url = u''
+        self.properties = BlobProperties()
+        self.metadata = {}
+
+
+class BlobProperties(WindowsAzureData):
+
+    ''' Blob Properties '''
+
+    def __init__(self):
+        self.last_modified = u''
+        self.etag = u''
+        self.content_length = 0
+        self.content_type = u''
+        self.content_encoding = u''
+        self.content_language = u''
+        self.content_md5 = u''
+        self.xms_blob_sequence_number = 0
+        self.blob_type = u''
+        self.lease_status = u''
+        self.lease_state = u''
+        self.lease_duration = u''
+        self.copy_id = u''
+        self.copy_source = u''
+        self.copy_status = u''
+        self.copy_progress = u''
+        self.copy_completion_time = u''
+        self.copy_status_description = u''
+
+
+class BlobPrefix(WindowsAzureData):
+
+    ''' BlobPrefix in Blob. '''
+
+    def __init__(self):
+        self.name = ''
+
+
+class BlobBlock(WindowsAzureData):
+
+    ''' BlobBlock class '''
+
+    def __init__(self, id=None, size=None):
+        self.id = id
+        self.size = size
+
+
+class BlobBlockList(WindowsAzureData):
+
+    ''' BlobBlockList class '''
+
+    def __init__(self):
+        self.committed_blocks = []
+        self.uncommitted_blocks = []
+
+
+class PageRange(WindowsAzureData):
+
+    ''' Page Range for page blob. '''
+
+    def __init__(self):
+        self.start = 0
+        self.end = 0
+
+
+class PageList(object):
+
+    ''' Page list for page blob.
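The enumeration wrappers above all implement the same small sequence protocol (__iter__, __len__, __getitem__), so callers can treat a listing result like a plain list. A minimal standalone illustration of that pattern, using an invented class name:

# Standalone miniature of the *EnumResults wrappers above; MiniEnumResults
# is illustrative and not part of the library.
class MiniEnumResults(object):

    def __init__(self, items):
        self.items = items

    def __iter__(self):
        return iter(self.items)

    def __len__(self):
        return len(self.items)

    def __getitem__(self, index):
        return self.items[index]

results = MiniEnumResults(['a', 'b'])
assert list(results) == ['a', 'b'] and len(results) == 2 and results[0] == 'a'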
'''
+
+    def __init__(self):
+        self.page_ranges = _list_of(PageRange)
+
+    def __iter__(self):
+        return iter(self.page_ranges)
+
+    def __len__(self):
+        return len(self.page_ranges)
+
+    def __getitem__(self, index):
+        return self.page_ranges[index]
+
+
+class QueueEnumResults(EnumResultsBase):
+
+    ''' Queue list'''
+
+    def __init__(self):
+        EnumResultsBase.__init__(self)
+        self.queues = _list_of(Queue)
+
+    def __iter__(self):
+        return iter(self.queues)
+
+    def __len__(self):
+        return len(self.queues)
+
+    def __getitem__(self, index):
+        return self.queues[index]
+
+
+class Queue(WindowsAzureData):
+
+    ''' Queue class '''
+
+    def __init__(self):
+        self.name = u''
+        self.url = u''
+        self.metadata = {}
+
+
+class QueueMessagesList(WindowsAzureData):
+
+    ''' Queue message list. '''
+
+    def __init__(self):
+        self.queue_messages = _list_of(QueueMessage)
+
+    def __iter__(self):
+        return iter(self.queue_messages)
+
+    def __len__(self):
+        return len(self.queue_messages)
+
+    def __getitem__(self, index):
+        return self.queue_messages[index]
+
+
+class QueueMessage(WindowsAzureData):
+
+    ''' Queue message class. '''
+
+    def __init__(self):
+        self.message_id = u''
+        self.insertion_time = u''
+        self.expiration_time = u''
+        self.pop_receipt = u''
+        self.time_next_visible = u''
+        self.dequeue_count = u''
+        self.message_text = u''
+
+
+class Entity(WindowsAzureData):
+
+    ''' Entity class. The attributes of entity will be created dynamically. '''
+    pass
+
+
+class EntityProperty(WindowsAzureData):
+
+    ''' Entity property. Contains type and value. '''
+
+    def __init__(self, type=None, value=None):
+        self.type = type
+        self.value = value
+
+
+class Table(WindowsAzureData):
+
+    ''' Only for intellisense and telling the user the return type. '''
+    pass
+
+
+def _parse_blob_enum_results_list(response):
+    respbody = response.body
+    return_obj = BlobEnumResults()
+    doc = minidom.parseString(respbody)
+
+    for enum_results in _get_child_nodes(doc, 'EnumerationResults'):
+        for child in _get_children_from_path(enum_results, 'Blobs', 'Blob'):
+            return_obj.blobs.append(_fill_instance_element(child, Blob))
+
+        for child in _get_children_from_path(enum_results,
+                                             'Blobs',
+                                             'BlobPrefix'):
+            return_obj.prefixes.append(
+                _fill_instance_element(child, BlobPrefix))
+
+        for name, value in vars(return_obj).items():
+            if name == 'blobs' or name == 'prefixes':
+                continue
+            value = _fill_data_minidom(enum_results, name, value)
+            if value is not None:
+                setattr(return_obj, name, value)
+
+    return return_obj
+
+
+def _update_storage_header(request):
+    ''' add additional headers for storage request. '''
+    if request.body:
+        assert isinstance(request.body, bytes)
+
+    # if it is PUT, POST, MERGE, DELETE, need to add content-length to header.
+    if request.method in ['PUT', 'POST', 'MERGE', 'DELETE']:
+        request.headers.append(('Content-Length', str(len(request.body))))
+
+    # append additional headers based on the service
+    request.headers.append(('x-ms-version', X_MS_VERSION))
+
+    # append x-ms-meta name, values to header
+    for name, value in request.headers:
+        if 'x-ms-meta-name-values' in name and value:
+            for meta_name, meta_value in value.items():
+                request.headers.append(('x-ms-meta-' + meta_name, meta_value))
+            request.headers.remove((name, value))
+            break
+    return request
+
+
+def _update_storage_blob_header(request, account_name, account_key):
+    ''' add additional headers for storage blob request.
''' + + request = _update_storage_header(request) + current_time = datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT') + request.headers.append(('x-ms-date', current_time)) + request.headers.append( + ('Content-Type', 'application/octet-stream Charset=UTF-8')) + request.headers.append(('Authorization', + _sign_storage_blob_request(request, + account_name, + account_key))) + + return request.headers + + +def _update_storage_queue_header(request, account_name, account_key): + ''' add additional headers for storage queue request. ''' + return _update_storage_blob_header(request, account_name, account_key) + + +def _update_storage_table_header(request): + ''' add additional headers for storage table request. ''' + + request = _update_storage_header(request) + for name, _ in request.headers: + if name.lower() == 'content-type': + break + else: + request.headers.append(('Content-Type', 'application/atom+xml')) + request.headers.append(('DataServiceVersion', '2.0;NetFx')) + request.headers.append(('MaxDataServiceVersion', '2.0;NetFx')) + current_time = datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT') + request.headers.append(('x-ms-date', current_time)) + request.headers.append(('Date', current_time)) + return request.headers + + +def _sign_storage_blob_request(request, account_name, account_key): + ''' + Returns the signed string for blob request which is used to set + Authorization header. This is also used to sign queue request. + ''' + + uri_path = request.path.split('?')[0] + + # method to sign + string_to_sign = request.method + '\n' + + # get headers to sign + headers_to_sign = [ + 'content-encoding', 'content-language', 'content-length', + 'content-md5', 'content-type', 'date', 'if-modified-since', + 'if-match', 'if-none-match', 'if-unmodified-since', 'range'] + + request_header_dict = dict((name.lower(), value) + for name, value in request.headers if value) + string_to_sign += '\n'.join(request_header_dict.get(x, '') + for x in headers_to_sign) + '\n' + + # get x-ms header to sign + x_ms_headers = [] + for name, value in request.headers: + if 'x-ms' in name: + x_ms_headers.append((name.lower(), value)) + x_ms_headers.sort() + for name, value in x_ms_headers: + if value: + string_to_sign += ''.join([name, ':', value, '\n']) + + # get account_name and uri path to sign + string_to_sign += '/' + account_name + uri_path + + # get query string to sign if it is not table service + query_to_sign = request.query + query_to_sign.sort() + + current_name = '' + for name, value in query_to_sign: + if value: + if current_name != name: + string_to_sign += '\n' + name + ':' + value + else: + string_to_sign += '\n' + ',' + value + + # sign the request + auth_string = 'SharedKey ' + account_name + ':' + \ + _sign_string(account_key, string_to_sign) + return auth_string + + +def _sign_storage_table_request(request, account_name, account_key): + uri_path = request.path.split('?')[0] + + string_to_sign = request.method + '\n' + headers_to_sign = ['content-md5', 'content-type', 'date'] + request_header_dict = dict((name.lower(), value) + for name, value in request.headers if value) + string_to_sign += '\n'.join(request_header_dict.get(x, '') + for x in headers_to_sign) + '\n' + + # get account_name and uri path to sign + string_to_sign += ''.join(['/', account_name, uri_path]) + + for name, value in request.query: + if name == 'comp' and uri_path == '/': + string_to_sign += '?comp=' + value + break + + # sign the request + auth_string = 'SharedKey ' + account_name + ':' + \ + 
_sign_string(account_key, string_to_sign) + return auth_string + + +def _to_python_bool(value): + if value.lower() == 'true': + return True + return False + + +def _to_entity_int(data): + int_max = (2 << 30) - 1 + if data > (int_max) or data < (int_max + 1) * (-1): + return 'Edm.Int64', str(data) + else: + return 'Edm.Int32', str(data) + + +def _to_entity_bool(value): + if value: + return 'Edm.Boolean', 'true' + return 'Edm.Boolean', 'false' + + +def _to_entity_datetime(value): + # Azure expects the date value passed in to be UTC. + # Azure will always return values as UTC. + # If a date is passed in without timezone info, it is assumed to be UTC. + if value.tzinfo: + value = value.astimezone(tzutc()) + return 'Edm.DateTime', value.strftime('%Y-%m-%dT%H:%M:%SZ') + + +def _to_entity_float(value): + return 'Edm.Double', str(value) + + +def _to_entity_property(value): + if value.type == 'Edm.Binary': + return value.type, _encode_base64(value.value) + + return value.type, str(value.value) + + +def _to_entity_none(value): + return None, None + + +def _to_entity_str(value): + return 'Edm.String', value + + +# Tables of conversions to and from entity types. We support specific +# datatypes, and beyond that the user can use an EntityProperty to get +# custom data type support. + +def _from_entity_binary(value): + return EntityProperty('Edm.Binary', _decode_base64_to_bytes(value)) + + +def _from_entity_int(value): + return int(value) + + +def _from_entity_datetime(value): + # Note that Azure always returns UTC datetime, and dateutil parser + # will set the tzinfo on the date it returns + return parser.parse(value) + +_ENTITY_TO_PYTHON_CONVERSIONS = { + 'Edm.Binary': _from_entity_binary, + 'Edm.Int32': _from_entity_int, + 'Edm.Int64': _from_entity_int, + 'Edm.Double': float, + 'Edm.Boolean': _to_python_bool, + 'Edm.DateTime': _from_entity_datetime, +} + +# Conversion from Python type to a function which returns a tuple of the +# type string and content string. +_PYTHON_TO_ENTITY_CONVERSIONS = { + int: _to_entity_int, + bool: _to_entity_bool, + datetime: _to_entity_datetime, + float: _to_entity_float, + EntityProperty: _to_entity_property, + str: _to_entity_str, +} + +if sys.version_info < (3,): + _PYTHON_TO_ENTITY_CONVERSIONS.update({ + long: _to_entity_int, + types.NoneType: _to_entity_none, + unicode: _to_entity_str, + }) + + +def _convert_entity_to_xml(source): + ''' Converts an entity object to xml to send. 
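One behavioral change worth noting in the Edm.DateTime handling above: naive datetimes are serialized as UTC with a trailing 'Z', and parsing now goes through dateutil, which attaches tzinfo to the value it returns. A small round-trip sketch of that behavior (the sample date is illustrative):

# Round-trip sketch of the Edm.DateTime conversions above; requires
# python-dateutil, which this diff bundles.
from datetime import datetime
from dateutil import parser
from dateutil.tz import tzutc

value = datetime(2014, 7, 10, 12, 30, 0)       # naive; treated as UTC
wire = value.strftime('%Y-%m-%dT%H:%M:%SZ')    # as _to_entity_datetime emits
assert wire == '2014-07-10T12:30:00Z'

parsed = parser.parse(wire)                    # as _from_entity_datetime returns
assert parsed.tzinfo is not None
assert parsed.astimezone(tzutc()).hour == 12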
+ + The entity format is: + <entry xmlns:d="http://schemas.microsoft.com/ado/2007/08/dataservices" xmlns:m="http://schemas.microsoft.com/ado/2007/08/dataservices/metadata" xmlns="http://www.w3.org/2005/Atom"> + <title /> + <updated>2008-09-18T23:46:19.3857256Z</updated> + <author> + <name /> + </author> + <id /> + <content type="application/xml"> + <m:properties> + <d:Address>Mountain View</d:Address> + <d:Age m:type="Edm.Int32">23</d:Age> + <d:AmountDue m:type="Edm.Double">200.23</d:AmountDue> + <d:BinaryData m:type="Edm.Binary" m:null="true" /> + <d:CustomerCode m:type="Edm.Guid">c9da6455-213d-42c9-9a79-3e9149a57833</d:CustomerCode> + <d:CustomerSince m:type="Edm.DateTime">2008-07-10T00:00:00</d:CustomerSince> + <d:IsActive m:type="Edm.Boolean">true</d:IsActive> + <d:NumOfOrders m:type="Edm.Int64">255</d:NumOfOrders> + <d:PartitionKey>mypartitionkey</d:PartitionKey> + <d:RowKey>myrowkey1</d:RowKey> + <d:Timestamp m:type="Edm.DateTime">0001-01-01T00:00:00</d:Timestamp> + </m:properties> + </content> + </entry> + ''' + + # construct the entity body included in <m:properties> and </m:properties> + entity_body = '<m:properties xml:space="preserve">{properties}</m:properties>' + + if isinstance(source, WindowsAzureData): + source = vars(source) + + properties_str = '' + + # set properties type for types we know if value has no type info. + # if value has type info, then set the type to value.type + for name, value in source.items(): + mtype = '' + conv = _PYTHON_TO_ENTITY_CONVERSIONS.get(type(value)) + if conv is None and sys.version_info >= (3,) and value is None: + conv = _to_entity_none + if conv is None: + raise WindowsAzureError( + _ERROR_CANNOT_SERIALIZE_VALUE_TO_ENTITY.format( + type(value).__name__)) + + mtype, value = conv(value) + + # form the property node + properties_str += ''.join(['<d:', name]) + if value is None: + properties_str += ' m:null="true" />' + else: + if mtype: + properties_str += ''.join([' m:type="', mtype, '"']) + properties_str += ''.join(['>', + xml_escape(value), '</d:', name, '>']) + + if sys.version_info < (3,): + if isinstance(properties_str, unicode): + properties_str = properties_str.encode('utf-8') + + # generate the entity_body + entity_body = entity_body.format(properties=properties_str) + xmlstr = _create_entry(entity_body) + return xmlstr + + +def _convert_table_to_xml(table_name): + ''' + Create xml to send for a given table name. Since xml format for table is + the same as entity and the only difference is that table has only one + property 'TableName', so we just call _convert_entity_to_xml. + + table_name: the name of the table + ''' + return _convert_entity_to_xml({'TableName': table_name}) + + +def _convert_block_list_to_xml(block_id_list): + ''' + Convert a block list to xml to send. + + block_id_list: + a str list containing the block ids that are used in put_block_list. + Only get block from latest blocks. + ''' + if block_id_list is None: + return '' + xml = '<?xml version="1.0" encoding="utf-8"?><BlockList>' + for value in block_id_list: + xml += '<Latest>{0}</Latest>'.format(_encode_base64(value)) + + return xml + '</BlockList>' + + +def _create_blob_result(response): + blob_properties = _parse_response_for_dict(response) + return BlobResult(response.body, blob_properties) + + +def _convert_response_to_block_list(response): + ''' + Converts xml response to block list class. 
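For reference, the Put Block List payload built by _convert_block_list_to_xml above has this shape; a standard-library-only sketch (each id is base64-encoded and emitted as a <Latest> element, i.e. "take the most recent upload of this block"):

    import base64

    def block_list_xml(block_ids):
        xml = '<?xml version="1.0" encoding="utf-8"?><BlockList>'
        for block_id in block_ids:
            encoded = base64.b64encode(block_id.encode('utf-8')).decode('ascii')
            xml += '<Latest>{0}</Latest>'.format(encoded)
        return xml + '</BlockList>'

    print(block_list_xml(['00000000', '00000001']))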
+ ''' + blob_block_list = BlobBlockList() + + xmldoc = minidom.parseString(response.body) + for xml_block in _get_children_from_path(xmldoc, + 'BlockList', + 'CommittedBlocks', + 'Block'): + xml_block_id = _decode_base64_to_text( + _get_child_nodes(xml_block, 'Name')[0].firstChild.nodeValue) + xml_block_size = int( + _get_child_nodes(xml_block, 'Size')[0].firstChild.nodeValue) + blob_block_list.committed_blocks.append( + BlobBlock(xml_block_id, xml_block_size)) + + for xml_block in _get_children_from_path(xmldoc, + 'BlockList', + 'UncommittedBlocks', + 'Block'): + xml_block_id = _decode_base64_to_text( + _get_child_nodes(xml_block, 'Name')[0].firstChild.nodeValue) + xml_block_size = int( + _get_child_nodes(xml_block, 'Size')[0].firstChild.nodeValue) + blob_block_list.uncommitted_blocks.append( + BlobBlock(xml_block_id, xml_block_size)) + + return blob_block_list + + +def _remove_prefix(name): + colon = name.find(':') + if colon != -1: + return name[colon + 1:] + return name + + +def _convert_response_to_entity(response): + if response is None: + return response + return _convert_xml_to_entity(response.body) + + +def _convert_xml_to_entity(xmlstr): + ''' Convert xml response to entity. + + The format of entity: + <entry xmlns:d="http://schemas.microsoft.com/ado/2007/08/dataservices" xmlns:m="http://schemas.microsoft.com/ado/2007/08/dataservices/metadata" xmlns="http://www.w3.org/2005/Atom"> + <title /> + <updated>2008-09-18T23:46:19.3857256Z</updated> + <author> + <name /> + </author> + <id /> + <content type="application/xml"> + <m:properties> + <d:Address>Mountain View</d:Address> + <d:Age m:type="Edm.Int32">23</d:Age> + <d:AmountDue m:type="Edm.Double">200.23</d:AmountDue> + <d:BinaryData m:type="Edm.Binary" m:null="true" /> + <d:CustomerCode m:type="Edm.Guid">c9da6455-213d-42c9-9a79-3e9149a57833</d:CustomerCode> + <d:CustomerSince m:type="Edm.DateTime">2008-07-10T00:00:00</d:CustomerSince> + <d:IsActive m:type="Edm.Boolean">true</d:IsActive> + <d:NumOfOrders m:type="Edm.Int64">255</d:NumOfOrders> + <d:PartitionKey>mypartitionkey</d:PartitionKey> + <d:RowKey>myrowkey1</d:RowKey> + <d:Timestamp m:type="Edm.DateTime">0001-01-01T00:00:00</d:Timestamp> + </m:properties> + </content> + </entry> + ''' + xmldoc = minidom.parseString(xmlstr) + + xml_properties = None + for entry in _get_child_nodes(xmldoc, 'entry'): + for content in _get_child_nodes(entry, 'content'): + # TODO: Namespace + xml_properties = _get_child_nodesNS( + content, METADATA_NS, 'properties') + + if not xml_properties: + return None + + entity = Entity() + # extract each property node and get the type from attribute and node value + for xml_property in xml_properties[0].childNodes: + name = _remove_prefix(xml_property.nodeName) + + if xml_property.firstChild: + value = xml_property.firstChild.nodeValue + else: + value = '' + + isnull = xml_property.getAttributeNS(METADATA_NS, 'null') + mtype = xml_property.getAttributeNS(METADATA_NS, 'type') + + # if not isnull and no type info, then it is a string and we just + # need the str type to hold the property. 
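+        # Dispatch: m:null="true" yields an EntityProperty holding None;
+        # a known m:type is converted via _ENTITY_TO_PYTHON_CONVERSIONS;
+        # any other typed value is preserved as EntityProperty(mtype, value).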
+ if not isnull and not mtype: + _set_entity_attr(entity, name, value) + elif isnull == 'true': + if mtype: + property = EntityProperty(mtype, None) + else: + property = EntityProperty('Edm.String', None) + else: # need an object to hold the property + conv = _ENTITY_TO_PYTHON_CONVERSIONS.get(mtype) + if conv is not None: + property = conv(value) + else: + property = EntityProperty(mtype, value) + _set_entity_attr(entity, name, property) + + # extract id, updated and name value from feed entry and set them of + # rule. + for name, value in _get_entry_properties(xmlstr, True).items(): + if name in ['etag']: + _set_entity_attr(entity, name, value) + + return entity + + +def _set_entity_attr(entity, name, value): + try: + setattr(entity, name, value) + except UnicodeEncodeError: + # Python 2 doesn't support unicode attribute names, so we'll + # add them and access them directly through the dictionary + entity.__dict__[name] = value + + +def _convert_xml_to_table(xmlstr): + ''' Converts the xml response to table class. + Simply call convert_xml_to_entity and extract the table name, and add + updated and author info + ''' + table = Table() + entity = _convert_xml_to_entity(xmlstr) + setattr(table, 'name', entity.TableName) + for name, value in _get_entry_properties(xmlstr, False).items(): + setattr(table, name, value) + return table + + +def _storage_error_handler(http_error): + ''' Simple error handler for storage service. ''' + return _general_error_handler(http_error) + +# make these available just from storage. +from azure.storage.blobservice import BlobService +from azure.storage.queueservice import QueueService +from azure.storage.tableservice import TableService +from azure.storage.cloudstorageaccount import CloudStorageAccount +from azure.storage.sharedaccesssignature import ( + SharedAccessSignature, + SharedAccessPolicy, + Permission, + WebResource, + ) diff --git a/awx/lib/site-packages/azure/storage/blobservice.py b/awx/lib/site-packages/azure/storage/blobservice.py index aca56b4584..f6b060eccd 100644 --- a/awx/lib/site-packages/azure/storage/blobservice.py +++ b/awx/lib/site-packages/azure/storage/blobservice.py @@ -1,2225 +1,2178 @@ -#------------------------------------------------------------------------- -# Copyright (c) Microsoft. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
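The re-exports just above (before this blobservice.py hunk) keep the service classes importable straight from azure.storage; a minimal client construction, with placeholder credentials:

    from azure.storage import BlobService

    svc = BlobService(account_name='mystorage',
                      account_key='<base64-account-key>')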
-#-------------------------------------------------------------------------- -from azure import ( - WindowsAzureError, - BLOB_SERVICE_HOST_BASE, - DEV_BLOB_HOST, - _ERROR_VALUE_NEGATIVE, - _ERROR_PAGE_BLOB_SIZE_ALIGNMENT, - _convert_class_to_xml, - _dont_fail_not_exist, - _dont_fail_on_exist, - _encode_base64, - _get_request_body, - _get_request_body_bytes_only, - _int_or_none, - _parse_enum_results_list, - _parse_response, - _parse_response_for_dict, - _parse_response_for_dict_filter, - _parse_response_for_dict_prefix, - _parse_simple_list, - _str, - _str_or_none, - _update_request_uri_query_local_storage, - _validate_type_bytes, - _validate_not_none, - ) -from azure.http import HTTPRequest -from azure.storage import ( - Container, - ContainerEnumResults, - PageList, - PageRange, - SignedIdentifiers, - StorageServiceProperties, - _convert_block_list_to_xml, - _convert_response_to_block_list, - _create_blob_result, - _parse_blob_enum_results_list, - _update_storage_blob_header, - ) -from azure.storage.storageclient import _StorageClient -from os import path -import sys -if sys.version_info >= (3,): - from io import BytesIO -else: - from cStringIO import StringIO as BytesIO - -# Keep this value sync with _ERROR_PAGE_BLOB_SIZE_ALIGNMENT -_PAGE_SIZE = 512 - -class BlobService(_StorageClient): - - ''' - This is the main class managing Blob resources. - ''' - - def __init__(self, account_name=None, account_key=None, protocol='https', - host_base=BLOB_SERVICE_HOST_BASE, dev_host=DEV_BLOB_HOST): - ''' - account_name: your storage account name, required for all operations. - account_key: your storage account key, required for all operations. - protocol: Optional. Protocol. Defaults to https. - host_base: - Optional. Live host base url. Defaults to Azure url. Override this - for on-premise. - dev_host: Optional. Dev host url. Defaults to localhost. - ''' - self._BLOB_MAX_DATA_SIZE = 64 * 1024 * 1024 - self._BLOB_MAX_CHUNK_DATA_SIZE = 4 * 1024 * 1024 - super(BlobService, self).__init__( - account_name, account_key, protocol, host_base, dev_host) - - def make_blob_url(self, container_name, blob_name, account_name=None, - protocol=None, host_base=None): - ''' - Creates the url to access a blob. - - container_name: Name of container. - blob_name: Name of blob. - account_name: - Name of the storage account. If not specified, uses the account - specified when BlobService was initialized. - protocol: - Protocol to use: 'http' or 'https'. If not specified, uses the - protocol specified when BlobService was initialized. - host_base: - Live host base url. If not specified, uses the host base specified - when BlobService was initialized. - ''' - if not account_name: - account_name = self.account_name - if not protocol: - protocol = self.protocol - if not host_base: - host_base = self.host_base - - return '{0}://{1}{2}/{3}/{4}'.format(protocol, - account_name, - host_base, - container_name, - blob_name) - - def list_containers(self, prefix=None, marker=None, maxresults=None, - include=None): - ''' - The List Containers operation returns a list of the containers under - the specified account. - - prefix: - Optional. Filters the results to return only containers whose names - begin with the specified prefix. - marker: - Optional. A string value that identifies the portion of the list to - be returned with the next list operation. - maxresults: - Optional. Specifies the maximum number of containers to return. - include: - Optional. 
Include this parameter to specify that the container's - metadata be returned as part of the response body. set this - parameter to string 'metadata' to get container's metadata. - ''' - request = HTTPRequest() - request.method = 'GET' - request.host = self._get_host() - request.path = '/?comp=list' - request.query = [ - ('prefix', _str_or_none(prefix)), - ('marker', _str_or_none(marker)), - ('maxresults', _int_or_none(maxresults)), - ('include', _str_or_none(include)) - ] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - response = self._perform_request(request) - - return _parse_enum_results_list(response, - ContainerEnumResults, - "Containers", - Container) - - def create_container(self, container_name, x_ms_meta_name_values=None, - x_ms_blob_public_access=None, fail_on_exist=False): - ''' - Creates a new container under the specified account. If the container - with the same name already exists, the operation fails. - - container_name: Name of container to create. - x_ms_meta_name_values: - Optional. A dict with name_value pairs to associate with the - container as metadata. Example:{'Category':'test'} - x_ms_blob_public_access: - Optional. Possible values include: container, blob - fail_on_exist: - specify whether to throw an exception when the container exists. - ''' - _validate_not_none('container_name', container_name) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + _str(container_name) + '?restype=container' - request.headers = [ - ('x-ms-meta-name-values', x_ms_meta_name_values), - ('x-ms-blob-public-access', _str_or_none(x_ms_blob_public_access)) - ] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - if not fail_on_exist: - try: - self._perform_request(request) - return True - except WindowsAzureError as ex: - _dont_fail_on_exist(ex) - return False - else: - self._perform_request(request) - return True - - def get_container_properties(self, container_name, x_ms_lease_id=None): - ''' - Returns all user-defined metadata and system properties for the - specified container. - - container_name: Name of existing container. - x_ms_lease_id: - If specified, get_container_properties only succeeds if the - container's lease is active and matches this ID. - ''' - _validate_not_none('container_name', container_name) - request = HTTPRequest() - request.method = 'GET' - request.host = self._get_host() - request.path = '/' + _str(container_name) + '?restype=container' - request.headers = [('x-ms-lease-id', _str_or_none(x_ms_lease_id))] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - response = self._perform_request(request) - - return _parse_response_for_dict(response) - - def get_container_metadata(self, container_name, x_ms_lease_id=None): - ''' - Returns all user-defined metadata for the specified container. The - metadata will be in returned dictionary['x-ms-meta-(name)']. - - container_name: Name of existing container. - x_ms_lease_id: - If specified, get_container_metadata only succeeds if the - container's lease is active and matches this ID. 
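Reusing the svc client from the construction sketch above, the container calls earlier in this hunk compose like this (names are placeholders; with the default fail_on_exist=False, create_container returns False rather than raising when the container already exists):

    if svc.create_container('logs', x_ms_meta_name_values={'env': 'dev'}):
        print('created')
    # iterating the enum-results wrapper directly is an assumption here
    for container in svc.list_containers(prefix='log'):
        print(container.name)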
- ''' - _validate_not_none('container_name', container_name) - request = HTTPRequest() - request.method = 'GET' - request.host = self._get_host() - request.path = '/' + \ - _str(container_name) + '?restype=container&comp=metadata' - request.headers = [('x-ms-lease-id', _str_or_none(x_ms_lease_id))] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - response = self._perform_request(request) - - return _parse_response_for_dict_prefix(response, prefixes=['x-ms-meta']) - - def set_container_metadata(self, container_name, - x_ms_meta_name_values=None, x_ms_lease_id=None): - ''' - Sets one or more user-defined name-value pairs for the specified - container. - - container_name: Name of existing container. - x_ms_meta_name_values: - A dict containing name, value for metadata. - Example: {'category':'test'} - x_ms_lease_id: - If specified, set_container_metadata only succeeds if the - container's lease is active and matches this ID. - ''' - _validate_not_none('container_name', container_name) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + \ - _str(container_name) + '?restype=container&comp=metadata' - request.headers = [ - ('x-ms-meta-name-values', x_ms_meta_name_values), - ('x-ms-lease-id', _str_or_none(x_ms_lease_id)), - ] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - self._perform_request(request) - - def get_container_acl(self, container_name, x_ms_lease_id=None): - ''' - Gets the permissions for the specified container. - - container_name: Name of existing container. - x_ms_lease_id: - If specified, get_container_acl only succeeds if the - container's lease is active and matches this ID. - ''' - _validate_not_none('container_name', container_name) - request = HTTPRequest() - request.method = 'GET' - request.host = self._get_host() - request.path = '/' + \ - _str(container_name) + '?restype=container&comp=acl' - request.headers = [('x-ms-lease-id', _str_or_none(x_ms_lease_id))] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - response = self._perform_request(request) - - return _parse_response(response, SignedIdentifiers) - - def set_container_acl(self, container_name, signed_identifiers=None, - x_ms_blob_public_access=None, x_ms_lease_id=None): - ''' - Sets the permissions for the specified container. - - container_name: Name of existing container. - signed_identifiers: SignedIdentifers instance - x_ms_blob_public_access: - Optional. Possible values include: container, blob - x_ms_lease_id: - If specified, set_container_acl only succeeds if the - container's lease is active and matches this ID. 
- ''' - _validate_not_none('container_name', container_name) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + \ - _str(container_name) + '?restype=container&comp=acl' - request.headers = [ - ('x-ms-blob-public-access', _str_or_none(x_ms_blob_public_access)), - ('x-ms-lease-id', _str_or_none(x_ms_lease_id)), - ] - request.body = _get_request_body( - _convert_class_to_xml(signed_identifiers)) - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - self._perform_request(request) - - def delete_container(self, container_name, fail_not_exist=False, - x_ms_lease_id=None): - ''' - Marks the specified container for deletion. - - container_name: Name of container to delete. - fail_not_exist: - Specify whether to throw an exception when the container doesn't - exist. - x_ms_lease_id: Required if the container has an active lease. - ''' - _validate_not_none('container_name', container_name) - request = HTTPRequest() - request.method = 'DELETE' - request.host = self._get_host() - request.path = '/' + _str(container_name) + '?restype=container' - request.headers = [('x-ms-lease-id', _str_or_none(x_ms_lease_id))] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - if not fail_not_exist: - try: - self._perform_request(request) - return True - except WindowsAzureError as ex: - _dont_fail_not_exist(ex) - return False - else: - self._perform_request(request) - return True - - def lease_container(self, container_name, x_ms_lease_action, - x_ms_lease_id=None, x_ms_lease_duration=60, - x_ms_lease_break_period=None, - x_ms_proposed_lease_id=None): - ''' - Establishes and manages a lock on a container for delete operations. - The lock duration can be 15 to 60 seconds, or can be infinite. - - container_name: Name of existing container. - x_ms_lease_action: - Required. Possible values: acquire|renew|release|break|change - x_ms_lease_id: Required if the container has an active lease. - x_ms_lease_duration: - Specifies the duration of the lease, in seconds, or negative one - (-1) for a lease that never expires. A non-infinite lease can be - between 15 and 60 seconds. A lease duration cannot be changed - using renew or change. For backwards compatibility, the default is - 60, and the value is only used on an acquire operation. - x_ms_lease_break_period: - Optional. For a break operation, this is the proposed duration of - seconds that the lease should continue before it is broken, between - 0 and 60 seconds. This break period is only used if it is shorter - than the time remaining on the lease. If longer, the time remaining - on the lease is used. A new lease will not be available before the - break period has expired, but the lease may be held for longer than - the break period. If this header does not appear with a break - operation, a fixed-duration lease breaks after the remaining lease - period elapses, and an infinite lease breaks immediately. - x_ms_proposed_lease_id: - Optional for acquire, required for change. Proposed lease ID, in a - GUID string format. 
- ''' - _validate_not_none('container_name', container_name) - _validate_not_none('x_ms_lease_action', x_ms_lease_action) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + \ - _str(container_name) + '?restype=container&comp=lease' - request.headers = [ - ('x-ms-lease-id', _str_or_none(x_ms_lease_id)), - ('x-ms-lease-action', _str_or_none(x_ms_lease_action)), - ('x-ms-lease-duration', - _str_or_none( - x_ms_lease_duration if x_ms_lease_action == 'acquire'\ - else None)), - ('x-ms-lease-break-period', _str_or_none(x_ms_lease_break_period)), - ('x-ms-proposed-lease-id', _str_or_none(x_ms_proposed_lease_id)), - ] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - response = self._perform_request(request) - - return _parse_response_for_dict_filter( - response, - filter=['x-ms-lease-id', 'x-ms-lease-time']) - - def list_blobs(self, container_name, prefix=None, marker=None, - maxresults=None, include=None, delimiter=None): - ''' - Returns the list of blobs under the specified container. - - container_name: Name of existing container. - prefix: - Optional. Filters the results to return only blobs whose names - begin with the specified prefix. - marker: - Optional. A string value that identifies the portion of the list - to be returned with the next list operation. The operation returns - a marker value within the response body if the list returned was - not complete. The marker value may then be used in a subsequent - call to request the next set of list items. The marker value is - opaque to the client. - maxresults: - Optional. Specifies the maximum number of blobs to return, - including all BlobPrefix elements. If the request does not specify - maxresults or specifies a value greater than 5,000, the server will - return up to 5,000 items. Setting maxresults to a value less than - or equal to zero results in error response code 400 (Bad Request). - include: - Optional. Specifies one or more datasets to include in the - response. To specify more than one of these options on the URI, - you must separate each option with a comma. Valid values are: - snapshots: - Specifies that snapshots should be included in the - enumeration. Snapshots are listed from oldest to newest in - the response. - metadata: - Specifies that blob metadata be returned in the response. - uncommittedblobs: - Specifies that blobs for which blocks have been uploaded, - but which have not been committed using Put Block List - (REST API), be included in the response. - copy: - Version 2012-02-12 and newer. Specifies that metadata - related to any current or previous Copy Blob operation - should be included in the response. - delimiter: - Optional. When the request includes this parameter, the operation - returns a BlobPrefix element in the response body that acts as a - placeholder for all blobs whose names begin with the same - substring up to the appearance of the delimiter character. The - delimiter may be a single character or a string. 
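lease_container above returns only the filtered lease headers, so an acquire/release round trip with the same svc client looks roughly like:

    lease = svc.lease_container('logs', 'acquire', x_ms_lease_duration=15)
    lease_id = lease['x-ms-lease-id']
    # ... perform the delete-protected work, then release the lock
    svc.lease_container('logs', 'release', x_ms_lease_id=lease_id)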
- ''' - _validate_not_none('container_name', container_name) - request = HTTPRequest() - request.method = 'GET' - request.host = self._get_host() - request.path = '/' + \ - _str(container_name) + '?restype=container&comp=list' - request.query = [ - ('prefix', _str_or_none(prefix)), - ('delimiter', _str_or_none(delimiter)), - ('marker', _str_or_none(marker)), - ('maxresults', _int_or_none(maxresults)), - ('include', _str_or_none(include)) - ] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - response = self._perform_request(request) - - return _parse_blob_enum_results_list(response) - - def set_blob_service_properties(self, storage_service_properties, - timeout=None): - ''' - Sets the properties of a storage account's Blob service, including - Windows Azure Storage Analytics. You can also use this operation to - set the default request version for all incoming requests that do not - have a version specified. - - storage_service_properties: a StorageServiceProperties object. - timeout: Optional. The timeout parameter is expressed in seconds. - ''' - _validate_not_none('storage_service_properties', - storage_service_properties) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/?restype=service&comp=properties' - request.query = [('timeout', _int_or_none(timeout))] - request.body = _get_request_body( - _convert_class_to_xml(storage_service_properties)) - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - self._perform_request(request) - - def get_blob_service_properties(self, timeout=None): - ''' - Gets the properties of a storage account's Blob service, including - Windows Azure Storage Analytics. - - timeout: Optional. The timeout parameter is expressed in seconds. - ''' - request = HTTPRequest() - request.method = 'GET' - request.host = self._get_host() - request.path = '/?restype=service&comp=properties' - request.query = [('timeout', _int_or_none(timeout))] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - response = self._perform_request(request) - - return _parse_response(response, StorageServiceProperties) - - def get_blob_properties(self, container_name, blob_name, - x_ms_lease_id=None): - ''' - Returns all user-defined metadata, standard HTTP properties, and - system properties for the blob. - - container_name: Name of existing container. - blob_name: Name of existing blob. - x_ms_lease_id: Required if the blob has an active lease. 
- ''' - _validate_not_none('container_name', container_name) - _validate_not_none('blob_name', blob_name) - request = HTTPRequest() - request.method = 'HEAD' - request.host = self._get_host() - request.path = '/' + _str(container_name) + '/' + _str(blob_name) + '' - request.headers = [('x-ms-lease-id', _str_or_none(x_ms_lease_id))] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - response = self._perform_request(request) - - return _parse_response_for_dict(response) - - def set_blob_properties(self, container_name, blob_name, - x_ms_blob_cache_control=None, - x_ms_blob_content_type=None, - x_ms_blob_content_md5=None, - x_ms_blob_content_encoding=None, - x_ms_blob_content_language=None, - x_ms_lease_id=None): - ''' - Sets system properties on the blob. - - container_name: Name of existing container. - blob_name: Name of existing blob. - x_ms_blob_cache_control: - Optional. Modifies the cache control string for the blob. - x_ms_blob_content_type: Optional. Sets the blob's content type. - x_ms_blob_content_md5: Optional. Sets the blob's MD5 hash. - x_ms_blob_content_encoding: Optional. Sets the blob's content encoding. - x_ms_blob_content_language: Optional. Sets the blob's content language. - x_ms_lease_id: Required if the blob has an active lease. - ''' - _validate_not_none('container_name', container_name) - _validate_not_none('blob_name', blob_name) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + \ - _str(container_name) + '/' + _str(blob_name) + '?comp=properties' - request.headers = [ - ('x-ms-blob-cache-control', _str_or_none(x_ms_blob_cache_control)), - ('x-ms-blob-content-type', _str_or_none(x_ms_blob_content_type)), - ('x-ms-blob-content-md5', _str_or_none(x_ms_blob_content_md5)), - ('x-ms-blob-content-encoding', - _str_or_none(x_ms_blob_content_encoding)), - ('x-ms-blob-content-language', - _str_or_none(x_ms_blob_content_language)), - ('x-ms-lease-id', _str_or_none(x_ms_lease_id)) - ] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - self._perform_request(request) - - def put_blob(self, container_name, blob_name, blob, x_ms_blob_type, - content_encoding=None, content_language=None, - content_md5=None, cache_control=None, - x_ms_blob_content_type=None, x_ms_blob_content_encoding=None, - x_ms_blob_content_language=None, x_ms_blob_content_md5=None, - x_ms_blob_cache_control=None, x_ms_meta_name_values=None, - x_ms_lease_id=None, x_ms_blob_content_length=None, - x_ms_blob_sequence_number=None): - ''' - Creates a new block blob or page blob, or updates the content of an - existing block blob. - - See put_block_blob_from_* and put_page_blob_from_* for high level - functions that handle the creation and upload of large blobs with - automatic chunking and progress notifications. - - container_name: Name of existing container. - blob_name: Name of blob to create or update. - blob: - For BlockBlob: - Content of blob as bytes (size < 64MB). For larger size, you - must call put_block and put_block_list to set content of blob. - For PageBlob: - Use None and call put_page to set content of blob. - x_ms_blob_type: Required. Could be BlockBlob or PageBlob. - content_encoding: - Optional. 
Specifies which content encodings have been applied to - the blob. This value is returned to the client when the Get Blob - (REST API) operation is performed on the blob resource. The client - can use this value when returned to decode the blob content. - content_language: - Optional. Specifies the natural languages used by this resource. - content_md5: - Optional. An MD5 hash of the blob content. This hash is used to - verify the integrity of the blob during transport. When this header - is specified, the storage service checks the hash that has arrived - with the one that was sent. If the two hashes do not match, the - operation will fail with error code 400 (Bad Request). - cache_control: - Optional. The Blob service stores this value but does not use or - modify it. - x_ms_blob_content_type: Optional. Set the blob's content type. - x_ms_blob_content_encoding: Optional. Set the blob's content encoding. - x_ms_blob_content_language: Optional. Set the blob's content language. - x_ms_blob_content_md5: Optional. Set the blob's MD5 hash. - x_ms_blob_cache_control: Optional. Sets the blob's cache control. - x_ms_meta_name_values: A dict containing name, value for metadata. - x_ms_lease_id: Required if the blob has an active lease. - x_ms_blob_content_length: - Required for page blobs. This header specifies the maximum size - for the page blob, up to 1 TB. The page blob size must be aligned - to a 512-byte boundary. - x_ms_blob_sequence_number: - Optional. Set for page blobs only. The sequence number is a - user-controlled value that you can use to track requests. The - value of the sequence number must be between 0 and 2^63 - 1. The - default value is 0. - ''' - _validate_not_none('container_name', container_name) - _validate_not_none('blob_name', blob_name) - _validate_not_none('x_ms_blob_type', x_ms_blob_type) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + _str(container_name) + '/' + _str(blob_name) + '' - request.headers = [ - ('x-ms-blob-type', _str_or_none(x_ms_blob_type)), - ('Content-Encoding', _str_or_none(content_encoding)), - ('Content-Language', _str_or_none(content_language)), - ('Content-MD5', _str_or_none(content_md5)), - ('Cache-Control', _str_or_none(cache_control)), - ('x-ms-blob-content-type', _str_or_none(x_ms_blob_content_type)), - ('x-ms-blob-content-encoding', - _str_or_none(x_ms_blob_content_encoding)), - ('x-ms-blob-content-language', - _str_or_none(x_ms_blob_content_language)), - ('x-ms-blob-content-md5', _str_or_none(x_ms_blob_content_md5)), - ('x-ms-blob-cache-control', _str_or_none(x_ms_blob_cache_control)), - ('x-ms-meta-name-values', x_ms_meta_name_values), - ('x-ms-lease-id', _str_or_none(x_ms_lease_id)), - ('x-ms-blob-content-length', - _str_or_none(x_ms_blob_content_length)), - ('x-ms-blob-sequence-number', - _str_or_none(x_ms_blob_sequence_number)) - ] - request.body = _get_request_body_bytes_only('blob', blob) - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - self._perform_request(request) - - def put_block_blob_from_path(self, container_name, blob_name, file_path, - content_encoding=None, content_language=None, - content_md5=None, cache_control=None, - x_ms_blob_content_type=None, - x_ms_blob_content_encoding=None, - x_ms_blob_content_language=None, - x_ms_blob_content_md5=None, - x_ms_blob_cache_control=None, - x_ms_meta_name_values=None, - 
x_ms_lease_id=None, progress_callback=None): - ''' - Creates a new block blob from a file path, or updates the content of an - existing block blob, with automatic chunking and progress notifications. - - container_name: Name of existing container. - blob_name: Name of blob to create or update. - file_path: Path of the file to upload as the blob content. - content_encoding: - Optional. Specifies which content encodings have been applied to - the blob. This value is returned to the client when the Get Blob - (REST API) operation is performed on the blob resource. The client - can use this value when returned to decode the blob content. - content_language: - Optional. Specifies the natural languages used by this resource. - content_md5: - Optional. An MD5 hash of the blob content. This hash is used to - verify the integrity of the blob during transport. When this header - is specified, the storage service checks the hash that has arrived - with the one that was sent. If the two hashes do not match, the - operation will fail with error code 400 (Bad Request). - cache_control: - Optional. The Blob service stores this value but does not use or - modify it. - x_ms_blob_content_type: Optional. Set the blob's content type. - x_ms_blob_content_encoding: Optional. Set the blob's content encoding. - x_ms_blob_content_language: Optional. Set the blob's content language. - x_ms_blob_content_md5: Optional. Set the blob's MD5 hash. - x_ms_blob_cache_control: Optional. Sets the blob's cache control. - x_ms_meta_name_values: A dict containing name, value for metadata. - x_ms_lease_id: Required if the blob has an active lease. - progress_callback: - Callback for progress with signature function(current, total) where - current is the number of bytes transfered so far, and total is the - size of the blob, or None if the total size is unknown. 
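A typical call to this path helper, again with the svc client from earlier (names and callback are placeholders; the callback receives (current_bytes, total_bytes) as documented above):

    svc.put_block_blob_from_path(
        'media', 'photo.jpg', '/tmp/photo.jpg',
        x_ms_blob_content_type='image/jpeg',
        progress_callback=lambda current, total: print(current, total))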
- ''' - _validate_not_none('container_name', container_name) - _validate_not_none('blob_name', blob_name) - _validate_not_none('file_path', file_path) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + _str(container_name) + '/' + _str(blob_name) + '' - request.headers = [ - ('x-ms-blob-type', 'BlockBlob'), - ('Content-Encoding', _str_or_none(content_encoding)), - ('Content-Language', _str_or_none(content_language)), - ('Content-MD5', _str_or_none(content_md5)), - ('Cache-Control', _str_or_none(cache_control)), - ('x-ms-blob-content-type', _str_or_none(x_ms_blob_content_type)), - ('x-ms-blob-content-encoding', - _str_or_none(x_ms_blob_content_encoding)), - ('x-ms-blob-content-language', - _str_or_none(x_ms_blob_content_language)), - ('x-ms-blob-content-md5', _str_or_none(x_ms_blob_content_md5)), - ('x-ms-blob-cache-control', _str_or_none(x_ms_blob_cache_control)), - ('x-ms-meta-name-values', x_ms_meta_name_values), - ('x-ms-lease-id', _str_or_none(x_ms_lease_id)), - ] - - count = path.getsize(file_path) - with open(file_path, 'rb') as stream: - self.put_block_blob_from_file(container_name, - blob_name, - stream, - count, - content_encoding, - content_language, - content_md5, - cache_control, - x_ms_blob_content_type, - x_ms_blob_content_encoding, - x_ms_blob_content_language, - x_ms_blob_content_md5, - x_ms_blob_cache_control, - x_ms_meta_name_values, - x_ms_lease_id, - progress_callback) - - def put_block_blob_from_file(self, container_name, blob_name, stream, - count=None, content_encoding=None, - content_language=None, content_md5=None, - cache_control=None, - x_ms_blob_content_type=None, - x_ms_blob_content_encoding=None, - x_ms_blob_content_language=None, - x_ms_blob_content_md5=None, - x_ms_blob_cache_control=None, - x_ms_meta_name_values=None, - x_ms_lease_id=None, progress_callback=None): - ''' - Creates a new block blob from a file/stream, or updates the content of - an existing block blob, with automatic chunking and progress - notifications. - - container_name: Name of existing container. - blob_name: Name of blob to create or update. - stream: Opened file/stream to upload as the blob content. - count: - Number of bytes to read from the stream. This is optional, but - should be supplied for optimal performance. - content_encoding: - Optional. Specifies which content encodings have been applied to - the blob. This value is returned to the client when the Get Blob - (REST API) operation is performed on the blob resource. The client - can use this value when returned to decode the blob content. - content_language: - Optional. Specifies the natural languages used by this resource. - content_md5: - Optional. An MD5 hash of the blob content. This hash is used to - verify the integrity of the blob during transport. When this header - is specified, the storage service checks the hash that has arrived - with the one that was sent. If the two hashes do not match, the - operation will fail with error code 400 (Bad Request). - cache_control: - Optional. The Blob service stores this value but does not use or - modify it. - x_ms_blob_content_type: Optional. Set the blob's content type. - x_ms_blob_content_encoding: Optional. Set the blob's content encoding. - x_ms_blob_content_language: Optional. Set the blob's content language. - x_ms_blob_content_md5: Optional. Set the blob's MD5 hash. - x_ms_blob_cache_control: Optional. Sets the blob's cache control. - x_ms_meta_name_values: A dict containing name, value for metadata. 
- x_ms_lease_id: Required if the blob has an active lease. - progress_callback: - Callback for progress with signature function(current, total) where - current is the number of bytes transfered so far, and total is the - size of the blob, or None if the total size is unknown. - ''' - _validate_not_none('container_name', container_name) - _validate_not_none('blob_name', blob_name) - _validate_not_none('stream', stream) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + _str(container_name) + '/' + _str(blob_name) + '' - request.headers = [ - ('x-ms-blob-type', 'BlockBlob'), - ('Content-Encoding', _str_or_none(content_encoding)), - ('Content-Language', _str_or_none(content_language)), - ('Content-MD5', _str_or_none(content_md5)), - ('Cache-Control', _str_or_none(cache_control)), - ('x-ms-blob-content-type', _str_or_none(x_ms_blob_content_type)), - ('x-ms-blob-content-encoding', - _str_or_none(x_ms_blob_content_encoding)), - ('x-ms-blob-content-language', - _str_or_none(x_ms_blob_content_language)), - ('x-ms-blob-content-md5', _str_or_none(x_ms_blob_content_md5)), - ('x-ms-blob-cache-control', _str_or_none(x_ms_blob_cache_control)), - ('x-ms-meta-name-values', x_ms_meta_name_values), - ('x-ms-lease-id', _str_or_none(x_ms_lease_id)), - ] - - if count and count < self._BLOB_MAX_DATA_SIZE: - if progress_callback: - progress_callback(0, count) - - data = stream.read(count) - self.put_blob(container_name, - blob_name, - data, - 'BlockBlob', - content_encoding, - content_language, - content_md5, - cache_control, - x_ms_blob_content_type, - x_ms_blob_content_encoding, - x_ms_blob_content_language, - x_ms_blob_content_md5, - x_ms_blob_cache_control, - x_ms_meta_name_values, - x_ms_lease_id) - - if progress_callback: - progress_callback(count, count) - else: - if progress_callback: - progress_callback(0, count) - - self.put_blob(container_name, - blob_name, - None, - 'BlockBlob', - content_encoding, - content_language, - content_md5, - cache_control, - x_ms_blob_content_type, - x_ms_blob_content_encoding, - x_ms_blob_content_language, - x_ms_blob_content_md5, - x_ms_blob_cache_control, - x_ms_meta_name_values, - x_ms_lease_id) - - remain_bytes = count - block_ids = [] - block_index = 0 - index = 0 - while True: - request_count = self._BLOB_MAX_CHUNK_DATA_SIZE\ - if remain_bytes is None else min( - remain_bytes, - self._BLOB_MAX_CHUNK_DATA_SIZE) - data = stream.read(request_count) - if data: - length = len(data) - index += length - remain_bytes = remain_bytes - \ - length if remain_bytes else None - block_id = '{0:08d}'.format(block_index) - self.put_block(container_name, blob_name, - data, block_id, x_ms_lease_id=x_ms_lease_id) - block_ids.append(block_id) - block_index += 1 - if progress_callback: - progress_callback(index, count) - else: - break - - self.put_block_list(container_name, blob_name, block_ids) - - def put_block_blob_from_bytes(self, container_name, blob_name, blob, - index=0, count=None, content_encoding=None, - content_language=None, content_md5=None, - cache_control=None, - x_ms_blob_content_type=None, - x_ms_blob_content_encoding=None, - x_ms_blob_content_language=None, - x_ms_blob_content_md5=None, - x_ms_blob_cache_control=None, - x_ms_meta_name_values=None, - x_ms_lease_id=None, progress_callback=None): - ''' - Creates a new block blob from an array of bytes, or updates the content - of an existing block blob, with automatic chunking and progress - notifications. - - container_name: Name of existing container. 
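The chunking loop in put_block_blob_from_file above reduces to the usual put_block / put_block_list pattern; a hand-rolled equivalent using the same 4 MB chunk size and the svc client from earlier (paths and names are placeholders):

    block_ids = []
    index = 0
    with open('/tmp/big.bin', 'rb') as stream:
        while True:
            data = stream.read(4 * 1024 * 1024)
            if not data:
                break
            block_id = '{0:08d}'.format(index)  # equal-length, zero-padded ids
            svc.put_block('backups', 'big.bin', data, block_id)
            block_ids.append(block_id)
            index += 1
    svc.put_block_list('backups', 'big.bin', block_ids)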
- blob_name: Name of blob to create or update. - blob: Content of blob as an array of bytes. - index: Start index in the array of bytes. - count: - Number of bytes to upload. Set to None or negative value to upload - all bytes starting from index. - content_encoding: - Optional. Specifies which content encodings have been applied to - the blob. This value is returned to the client when the Get Blob - (REST API) operation is performed on the blob resource. The client - can use this value when returned to decode the blob content. - content_language: - Optional. Specifies the natural languages used by this resource. - content_md5: - Optional. An MD5 hash of the blob content. This hash is used to - verify the integrity of the blob during transport. When this header - is specified, the storage service checks the hash that has arrived - with the one that was sent. If the two hashes do not match, the - operation will fail with error code 400 (Bad Request). - cache_control: - Optional. The Blob service stores this value but does not use or - modify it. - x_ms_blob_content_type: Optional. Set the blob's content type. - x_ms_blob_content_encoding: Optional. Set the blob's content encoding. - x_ms_blob_content_language: Optional. Set the blob's content language. - x_ms_blob_content_md5: Optional. Set the blob's MD5 hash. - x_ms_blob_cache_control: Optional. Sets the blob's cache control. - x_ms_meta_name_values: A dict containing name, value for metadata. - x_ms_lease_id: Required if the blob has an active lease. - progress_callback: - Callback for progress with signature function(current, total) where - current is the number of bytes transfered so far, and total is the - size of the blob, or None if the total size is unknown. - ''' - _validate_not_none('container_name', container_name) - _validate_not_none('blob_name', blob_name) - _validate_not_none('blob', blob) - _validate_not_none('index', index) - _validate_type_bytes('blob', blob) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + _str(container_name) + '/' + _str(blob_name) + '' - request.headers = [ - ('x-ms-blob-type', 'BlockBlob'), - ('Content-Encoding', _str_or_none(content_encoding)), - ('Content-Language', _str_or_none(content_language)), - ('Content-MD5', _str_or_none(content_md5)), - ('Cache-Control', _str_or_none(cache_control)), - ('x-ms-blob-content-type', _str_or_none(x_ms_blob_content_type)), - ('x-ms-blob-content-encoding', - _str_or_none(x_ms_blob_content_encoding)), - ('x-ms-blob-content-language', - _str_or_none(x_ms_blob_content_language)), - ('x-ms-blob-content-md5', _str_or_none(x_ms_blob_content_md5)), - ('x-ms-blob-cache-control', _str_or_none(x_ms_blob_cache_control)), - ('x-ms-meta-name-values', x_ms_meta_name_values), - ('x-ms-lease-id', _str_or_none(x_ms_lease_id)), - ] - - if index < 0: - raise TypeError(_ERROR_VALUE_NEGATIVE.format('index')) - - if count is None or count < 0: - count = len(blob) - index - - if count < self._BLOB_MAX_DATA_SIZE: - if progress_callback: - progress_callback(0, count) - - data = blob[index: index + count] - self.put_blob(container_name, - blob_name, - data, - 'BlockBlob', - content_encoding, - content_language, - content_md5, - cache_control, - x_ms_blob_content_type, - x_ms_blob_content_encoding, - x_ms_blob_content_language, - x_ms_blob_content_md5, - x_ms_blob_cache_control, - x_ms_meta_name_values, - x_ms_lease_id) - - if progress_callback: - progress_callback(count, count) - else: - stream = BytesIO(blob) - stream.seek(index) 
- - self.put_block_blob_from_file(container_name, - blob_name, - stream, - count, - content_encoding, - content_language, - content_md5, - cache_control, - x_ms_blob_content_type, - x_ms_blob_content_encoding, - x_ms_blob_content_language, - x_ms_blob_content_md5, - x_ms_blob_cache_control, - x_ms_meta_name_values, - x_ms_lease_id, - progress_callback) - - def put_block_blob_from_text(self, container_name, blob_name, text, - text_encoding='utf-8', - content_encoding=None, content_language=None, - content_md5=None, cache_control=None, - x_ms_blob_content_type=None, - x_ms_blob_content_encoding=None, - x_ms_blob_content_language=None, - x_ms_blob_content_md5=None, - x_ms_blob_cache_control=None, - x_ms_meta_name_values=None, - x_ms_lease_id=None, progress_callback=None): - ''' - Creates a new block blob from str/unicode, or updates the content of an - existing block blob, with automatic chunking and progress notifications. - - container_name: Name of existing container. - blob_name: Name of blob to create or update. - text: Text to upload to the blob. - text_encoding: Encoding to use to convert the text to bytes. - content_encoding: - Optional. Specifies which content encodings have been applied to - the blob. This value is returned to the client when the Get Blob - (REST API) operation is performed on the blob resource. The client - can use this value when returned to decode the blob content. - content_language: - Optional. Specifies the natural languages used by this resource. - content_md5: - Optional. An MD5 hash of the blob content. This hash is used to - verify the integrity of the blob during transport. When this header - is specified, the storage service checks the hash that has arrived - with the one that was sent. If the two hashes do not match, the - operation will fail with error code 400 (Bad Request). - cache_control: - Optional. The Blob service stores this value but does not use or - modify it. - x_ms_blob_content_type: Optional. Set the blob's content type. - x_ms_blob_content_encoding: Optional. Set the blob's content encoding. - x_ms_blob_content_language: Optional. Set the blob's content language. - x_ms_blob_content_md5: Optional. Set the blob's MD5 hash. - x_ms_blob_cache_control: Optional. Sets the blob's cache control. - x_ms_meta_name_values: A dict containing name, value for metadata. - x_ms_lease_id: Required if the blob has an active lease. - progress_callback: - Callback for progress with signature function(current, total) where - current is the number of bytes transfered so far, and total is the - size of the blob, or None if the total size is unknown. 
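put_block_blob_from_text encodes non-bytes input with text_encoding and then delegates to put_block_blob_from_bytes, so uploading unicode content is a single call on the svc client:

    svc.put_block_blob_from_text('notes', 'readme.txt', u'h\u00e9llo world',
                                 text_encoding='utf-8')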
- ''' - _validate_not_none('container_name', container_name) - _validate_not_none('blob_name', blob_name) - _validate_not_none('text', text) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + _str(container_name) + '/' + _str(blob_name) + '' - request.headers = [ - ('x-ms-blob-type', 'BlockBlob'), - ('Content-Encoding', _str_or_none(content_encoding)), - ('Content-Language', _str_or_none(content_language)), - ('Content-MD5', _str_or_none(content_md5)), - ('Cache-Control', _str_or_none(cache_control)), - ('x-ms-blob-content-type', _str_or_none(x_ms_blob_content_type)), - ('x-ms-blob-content-encoding', - _str_or_none(x_ms_blob_content_encoding)), - ('x-ms-blob-content-language', - _str_or_none(x_ms_blob_content_language)), - ('x-ms-blob-content-md5', _str_or_none(x_ms_blob_content_md5)), - ('x-ms-blob-cache-control', _str_or_none(x_ms_blob_cache_control)), - ('x-ms-meta-name-values', x_ms_meta_name_values), - ('x-ms-lease-id', _str_or_none(x_ms_lease_id)), - ] - - if not isinstance(text, bytes): - _validate_not_none('text_encoding', text_encoding) - text = text.encode(text_encoding) - - self.put_block_blob_from_bytes(container_name, - blob_name, - text, - 0, - len(text), - content_encoding, - content_language, - content_md5, - cache_control, - x_ms_blob_content_type, - x_ms_blob_content_encoding, - x_ms_blob_content_language, - x_ms_blob_content_md5, - x_ms_blob_cache_control, - x_ms_meta_name_values, - x_ms_lease_id, - progress_callback) - - def put_page_blob_from_path(self, container_name, blob_name, file_path, - content_encoding=None, content_language=None, - content_md5=None, cache_control=None, - x_ms_blob_content_type=None, - x_ms_blob_content_encoding=None, - x_ms_blob_content_language=None, - x_ms_blob_content_md5=None, - x_ms_blob_cache_control=None, - x_ms_meta_name_values=None, - x_ms_lease_id=None, - x_ms_blob_sequence_number=None, - progress_callback=None): - ''' - Creates a new page blob from a file path, or updates the content of an - existing page blob, with automatic chunking and progress notifications. - - container_name: Name of existing container. - blob_name: Name of blob to create or update. - file_path: Path of the file to upload as the blob content. - content_encoding: - Optional. Specifies which content encodings have been applied to - the blob. This value is returned to the client when the Get Blob - (REST API) operation is performed on the blob resource. The client - can use this value when returned to decode the blob content. - content_language: - Optional. Specifies the natural languages used by this resource. - content_md5: - Optional. An MD5 hash of the blob content. This hash is used to - verify the integrity of the blob during transport. When this header - is specified, the storage service checks the hash that has arrived - with the one that was sent. If the two hashes do not match, the - operation will fail with error code 400 (Bad Request). - cache_control: - Optional. The Blob service stores this value but does not use or - modify it. - x_ms_blob_content_type: Optional. Set the blob's content type. - x_ms_blob_content_encoding: Optional. Set the blob's content encoding. - x_ms_blob_content_language: Optional. Set the blob's content language. - x_ms_blob_content_md5: Optional. Set the blob's MD5 hash. - x_ms_blob_cache_control: Optional. Sets the blob's cache control. - x_ms_meta_name_values: A dict containing name, value for metadata. - x_ms_lease_id: Required if the blob has an active lease. 
- x_ms_blob_sequence_number: - Optional. Set for page blobs only. The sequence number is a - user-controlled value that you can use to track requests. The - value of the sequence number must be between 0 and 2^63 - 1. The - default value is 0. - progress_callback: - Callback for progress with signature function(current, total) where - current is the number of bytes transfered so far, and total is the - size of the blob, or None if the total size is unknown. - ''' - _validate_not_none('container_name', container_name) - _validate_not_none('blob_name', blob_name) - _validate_not_none('file_path', file_path) - - count = path.getsize(file_path) - with open(file_path, 'rb') as stream: - self.put_page_blob_from_file(container_name, - blob_name, - stream, - count, - content_encoding, - content_language, - content_md5, - cache_control, - x_ms_blob_content_type, - x_ms_blob_content_encoding, - x_ms_blob_content_language, - x_ms_blob_content_md5, - x_ms_blob_cache_control, - x_ms_meta_name_values, - x_ms_lease_id, - x_ms_blob_sequence_number, - progress_callback) - - def put_page_blob_from_file(self, container_name, blob_name, stream, count, - content_encoding=None, content_language=None, - content_md5=None, cache_control=None, - x_ms_blob_content_type=None, - x_ms_blob_content_encoding=None, - x_ms_blob_content_language=None, - x_ms_blob_content_md5=None, - x_ms_blob_cache_control=None, - x_ms_meta_name_values=None, - x_ms_lease_id=None, - x_ms_blob_sequence_number=None, - progress_callback=None): - ''' - Creates a new page blob from a file/stream, or updates the content of an - existing page blob, with automatic chunking and progress notifications. - - container_name: Name of existing container. - blob_name: Name of blob to create or update. - stream: Opened file/stream to upload as the blob content. - count: - Number of bytes to read from the stream. This is required, a page - blob cannot be created if the count is unknown. - content_encoding: - Optional. Specifies which content encodings have been applied to - the blob. This value is returned to the client when the Get Blob - (REST API) operation is performed on the blob resource. The client - can use this value when returned to decode the blob content. - content_language: - Optional. Specifies the natural languages used by this resource. - content_md5: - Optional. An MD5 hash of the blob content. This hash is used to - verify the integrity of the blob during transport. When this header - is specified, the storage service checks the hash that has arrived - with the one that was sent. If the two hashes do not match, the - operation will fail with error code 400 (Bad Request). - cache_control: - Optional. The Blob service stores this value but does not use or - modify it. - x_ms_blob_content_type: Optional. Set the blob's content type. - x_ms_blob_content_encoding: Optional. Set the blob's content encoding. - x_ms_blob_content_language: Optional. Set the blob's content language. - x_ms_blob_content_md5: Optional. Set the blob's MD5 hash. - x_ms_blob_cache_control: Optional. Sets the blob's cache control. - x_ms_meta_name_values: A dict containing name, value for metadata. - x_ms_lease_id: Required if the blob has an active lease. - x_ms_blob_sequence_number: - Optional. Set for page blobs only. The sequence number is a - user-controlled value that you can use to track requests. The - value of the sequence number must be between 0 and 2^63 - 1. The - default value is 0. 
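Page blobs differ from block blobs in the loop that follows: the total size must be 512-byte aligned, the blob is first created empty at its final size, and each chunk is written with put_page in 'update' mode. A minimal aligned write on the svc client (container and blob names are placeholders; the empty-body create mirrors the call below):

    data = b'\x00' * 1024                 # length must be a multiple of 512
    svc.put_blob('disks', 'disk.vhd', b'', 'PageBlob',
                 x_ms_blob_content_length=len(data))
    svc.put_page('disks', 'disk.vhd', data, 'bytes=0-1023', 'update')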
-        progress_callback:
-            Callback for progress with signature function(current, total) where
-            current is the number of bytes transferred so far, and total is the
-            size of the blob, or None if the total size is unknown.
-        '''
-        _validate_not_none('container_name', container_name)
-        _validate_not_none('blob_name', blob_name)
-        _validate_not_none('stream', stream)
-        _validate_not_none('count', count)
-
-        if count < 0:
-            raise TypeError(_ERROR_VALUE_NEGATIVE.format('count'))
-
-        if count % _PAGE_SIZE != 0:
-            raise TypeError(_ERROR_PAGE_BLOB_SIZE_ALIGNMENT.format(count))
-
-        if progress_callback:
-            progress_callback(0, count)
-
-        self.put_blob(container_name,
-                      blob_name,
-                      b'',
-                      'PageBlob',
-                      content_encoding,
-                      content_language,
-                      content_md5,
-                      cache_control,
-                      x_ms_blob_content_type,
-                      x_ms_blob_content_encoding,
-                      x_ms_blob_content_language,
-                      x_ms_blob_content_md5,
-                      x_ms_blob_cache_control,
-                      x_ms_meta_name_values,
-                      x_ms_lease_id,
-                      count,
-                      x_ms_blob_sequence_number)
-
-        remain_bytes = count
-        page_start = 0
-        while True:
-            request_count = min(remain_bytes, self._BLOB_MAX_CHUNK_DATA_SIZE)
-            data = stream.read(request_count)
-            if data:
-                length = len(data)
-                remain_bytes = remain_bytes - length
-                page_end = page_start + length - 1
-                self.put_page(container_name,
-                              blob_name,
-                              data,
-                              'bytes={0}-{1}'.format(page_start, page_end),
-                              'update',
-                              x_ms_lease_id=x_ms_lease_id)
-                page_start = page_start + length
-
-                if progress_callback:
-                    progress_callback(page_start, count)
-            else:
-                break
-
-    def put_page_blob_from_bytes(self, container_name, blob_name, blob,
-                                 index=0, count=None, content_encoding=None,
-                                 content_language=None, content_md5=None,
-                                 cache_control=None,
-                                 x_ms_blob_content_type=None,
-                                 x_ms_blob_content_encoding=None,
-                                 x_ms_blob_content_language=None,
-                                 x_ms_blob_content_md5=None,
-                                 x_ms_blob_cache_control=None,
-                                 x_ms_meta_name_values=None,
-                                 x_ms_lease_id=None,
-                                 x_ms_blob_sequence_number=None,
-                                 progress_callback=None):
-        '''
-        Creates a new page blob from an array of bytes, or updates the content
-        of an existing page blob, with automatic chunking and progress
-        notifications.
-
-        container_name: Name of existing container.
-        blob_name: Name of blob to create or update.
-        blob: Content of blob as an array of bytes.
-        index: Start index in the array of bytes.
-        count:
-            Number of bytes to upload. Set to None or negative value to upload
-            all bytes starting from index.
-        content_encoding:
-            Optional. Specifies which content encodings have been applied to
-            the blob. This value is returned to the client when the Get Blob
-            (REST API) operation is performed on the blob resource. The client
-            can use this value when returned to decode the blob content.
-        content_language:
-            Optional. Specifies the natural languages used by this resource.
-        content_md5:
-            Optional. An MD5 hash of the blob content. This hash is used to
-            verify the integrity of the blob during transport. When this header
-            is specified, the storage service checks the hash that has arrived
-            with the one that was sent. If the two hashes do not match, the
-            operation will fail with error code 400 (Bad Request).
-        cache_control:
-            Optional. The Blob service stores this value but does not use or
-            modify it.
-        x_ms_blob_content_type: Optional. Set the blob's content type.
-        x_ms_blob_content_encoding: Optional. Set the blob's content encoding.
-        x_ms_blob_content_language: Optional. Set the blob's content language.
-        x_ms_blob_content_md5: Optional. Set the blob's MD5 hash.
-        x_ms_blob_cache_control: Optional.
Sets the blob's cache control. - x_ms_meta_name_values: A dict containing name, value for metadata. - x_ms_lease_id: Required if the blob has an active lease. - x_ms_blob_sequence_number: - Optional. Set for page blobs only. The sequence number is a - user-controlled value that you can use to track requests. The - value of the sequence number must be between 0 and 2^63 - 1. The - default value is 0. - progress_callback: - Callback for progress with signature function(current, total) where - current is the number of bytes transfered so far, and total is the - size of the blob, or None if the total size is unknown. - ''' - _validate_not_none('container_name', container_name) - _validate_not_none('blob_name', blob_name) - _validate_not_none('blob', blob) - _validate_type_bytes('blob', blob) - - if index < 0: - raise TypeError(_ERROR_VALUE_NEGATIVE.format('index')) - - if count is None or count < 0: - count = len(blob) - index - - stream = BytesIO(blob) - stream.seek(index) - - self.put_page_blob_from_file(container_name, - blob_name, - stream, - count, - content_encoding, - content_language, - content_md5, - cache_control, - x_ms_blob_content_type, - x_ms_blob_content_encoding, - x_ms_blob_content_language, - x_ms_blob_content_md5, - x_ms_blob_cache_control, - x_ms_meta_name_values, - x_ms_lease_id, - x_ms_blob_sequence_number, - progress_callback) - - def get_blob(self, container_name, blob_name, snapshot=None, - x_ms_range=None, x_ms_lease_id=None, - x_ms_range_get_content_md5=None): - ''' - Reads or downloads a blob from the system, including its metadata and - properties. - - See get_blob_to_* for high level functions that handle the download - of large blobs with automatic chunking and progress notifications. - - container_name: Name of existing container. - blob_name: Name of existing blob. - snapshot: - Optional. The snapshot parameter is an opaque DateTime value that, - when present, specifies the blob snapshot to retrieve. - x_ms_range: - Optional. Return only the bytes of the blob in the specified range. - x_ms_lease_id: Required if the blob has an active lease. - x_ms_range_get_content_md5: - Optional. When this header is set to true and specified together - with the Range header, the service returns the MD5 hash for the - range, as long as the range is less than or equal to 4 MB in size. - ''' - _validate_not_none('container_name', container_name) - _validate_not_none('blob_name', blob_name) - request = HTTPRequest() - request.method = 'GET' - request.host = self._get_host() - request.path = '/' + _str(container_name) + '/' + _str(blob_name) + '' - request.headers = [ - ('x-ms-range', _str_or_none(x_ms_range)), - ('x-ms-lease-id', _str_or_none(x_ms_lease_id)), - ('x-ms-range-get-content-md5', - _str_or_none(x_ms_range_get_content_md5)) - ] - request.query = [('snapshot', _str_or_none(snapshot))] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - response = self._perform_request(request, None) - - return _create_blob_result(response) - - def get_blob_to_path(self, container_name, blob_name, file_path, - open_mode='wb', snapshot=None, x_ms_lease_id=None, - progress_callback=None): - ''' - Downloads a blob to a file path, with automatic chunking and progress - notifications. - - container_name: Name of existing container. - blob_name: Name of existing blob. - file_path: Path of file to write to. 
- open_mode: Mode to use when opening the file. - snapshot: - Optional. The snapshot parameter is an opaque DateTime value that, - when present, specifies the blob snapshot to retrieve. - x_ms_lease_id: Required if the blob has an active lease. - progress_callback: - Callback for progress with signature function(current, total) where - current is the number of bytes transfered so far, and total is the - size of the blob. - ''' - _validate_not_none('container_name', container_name) - _validate_not_none('blob_name', blob_name) - _validate_not_none('file_path', file_path) - _validate_not_none('open_mode', open_mode) - - with open(file_path, open_mode) as stream: - self.get_blob_to_file(container_name, - blob_name, - stream, - snapshot, - x_ms_lease_id, - progress_callback) - - def get_blob_to_file(self, container_name, blob_name, stream, - snapshot=None, x_ms_lease_id=None, - progress_callback=None): - ''' - Downloads a blob to a file/stream, with automatic chunking and progress - notifications. - - container_name: Name of existing container. - blob_name: Name of existing blob. - stream: Opened file/stream to write to. - snapshot: - Optional. The snapshot parameter is an opaque DateTime value that, - when present, specifies the blob snapshot to retrieve. - x_ms_lease_id: Required if the blob has an active lease. - progress_callback: - Callback for progress with signature function(current, total) where - current is the number of bytes transfered so far, and total is the - size of the blob. - ''' - _validate_not_none('container_name', container_name) - _validate_not_none('blob_name', blob_name) - _validate_not_none('stream', stream) - - props = self.get_blob_properties(container_name, blob_name) - blob_size = int(props['content-length']) - - if blob_size < self._BLOB_MAX_DATA_SIZE: - if progress_callback: - progress_callback(0, blob_size) - - data = self.get_blob(container_name, - blob_name, - snapshot, - x_ms_lease_id=x_ms_lease_id) - - stream.write(data) - - if progress_callback: - progress_callback(blob_size, blob_size) - else: - if progress_callback: - progress_callback(0, blob_size) - - index = 0 - while index < blob_size: - chunk_range = 'bytes={}-{}'.format( - index, - index + self._BLOB_MAX_CHUNK_DATA_SIZE - 1) - data = self.get_blob( - container_name, blob_name, x_ms_range=chunk_range) - length = len(data) - index += length - if length > 0: - stream.write(data) - if progress_callback: - progress_callback(index, blob_size) - if length < self._BLOB_MAX_CHUNK_DATA_SIZE: - break - else: - break - - def get_blob_to_bytes(self, container_name, blob_name, snapshot=None, - x_ms_lease_id=None, progress_callback=None): - ''' - Downloads a blob as an array of bytes, with automatic chunking and - progress notifications. - - container_name: Name of existing container. - blob_name: Name of existing blob. - snapshot: - Optional. The snapshot parameter is an opaque DateTime value that, - when present, specifies the blob snapshot to retrieve. - x_ms_lease_id: Required if the blob has an active lease. - progress_callback: - Callback for progress with signature function(current, total) where - current is the number of bytes transfered so far, and total is the - size of the blob. 
- ''' - _validate_not_none('container_name', container_name) - _validate_not_none('blob_name', blob_name) - - stream = BytesIO() - self.get_blob_to_file(container_name, - blob_name, - stream, - snapshot, - x_ms_lease_id, - progress_callback) - - return stream.getvalue() - - def get_blob_to_text(self, container_name, blob_name, text_encoding='utf-8', - snapshot=None, x_ms_lease_id=None, - progress_callback=None): - ''' - Downloads a blob as unicode text, with automatic chunking and progress - notifications. - - container_name: Name of existing container. - blob_name: Name of existing blob. - text_encoding: Encoding to use when decoding the blob data. - snapshot: - Optional. The snapshot parameter is an opaque DateTime value that, - when present, specifies the blob snapshot to retrieve. - x_ms_lease_id: Required if the blob has an active lease. - progress_callback: - Callback for progress with signature function(current, total) where - current is the number of bytes transfered so far, and total is the - size of the blob. - ''' - _validate_not_none('container_name', container_name) - _validate_not_none('blob_name', blob_name) - _validate_not_none('text_encoding', text_encoding) - - result = self.get_blob_to_bytes(container_name, - blob_name, - snapshot, - x_ms_lease_id, - progress_callback) - - return result.decode(text_encoding) - - def get_blob_metadata(self, container_name, blob_name, snapshot=None, - x_ms_lease_id=None): - ''' - Returns all user-defined metadata for the specified blob or snapshot. - - container_name: Name of existing container. - blob_name: Name of existing blob. - snapshot: - Optional. The snapshot parameter is an opaque DateTime value that, - when present, specifies the blob snapshot to retrieve. - x_ms_lease_id: Required if the blob has an active lease. - ''' - _validate_not_none('container_name', container_name) - _validate_not_none('blob_name', blob_name) - request = HTTPRequest() - request.method = 'GET' - request.host = self._get_host() - request.path = '/' + \ - _str(container_name) + '/' + _str(blob_name) + '?comp=metadata' - request.headers = [('x-ms-lease-id', _str_or_none(x_ms_lease_id))] - request.query = [('snapshot', _str_or_none(snapshot))] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - response = self._perform_request(request) - - return _parse_response_for_dict_prefix(response, prefixes=['x-ms-meta']) - - def set_blob_metadata(self, container_name, blob_name, - x_ms_meta_name_values=None, x_ms_lease_id=None): - ''' - Sets user-defined metadata for the specified blob as one or more - name-value pairs. - - container_name: Name of existing container. - blob_name: Name of existing blob. - x_ms_meta_name_values: Dict containing name and value pairs. - x_ms_lease_id: Required if the blob has an active lease. 
- ''' - _validate_not_none('container_name', container_name) - _validate_not_none('blob_name', blob_name) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + \ - _str(container_name) + '/' + _str(blob_name) + '?comp=metadata' - request.headers = [ - ('x-ms-meta-name-values', x_ms_meta_name_values), - ('x-ms-lease-id', _str_or_none(x_ms_lease_id)) - ] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - self._perform_request(request) - - def lease_blob(self, container_name, blob_name, x_ms_lease_action, - x_ms_lease_id=None, x_ms_lease_duration=60, - x_ms_lease_break_period=None, x_ms_proposed_lease_id=None): - ''' - Establishes and manages a one-minute lock on a blob for write - operations. - - container_name: Name of existing container. - blob_name: Name of existing blob. - x_ms_lease_action: - Required. Possible values: acquire|renew|release|break|change - x_ms_lease_id: Required if the blob has an active lease. - x_ms_lease_duration: - Specifies the duration of the lease, in seconds, or negative one - (-1) for a lease that never expires. A non-infinite lease can be - between 15 and 60 seconds. A lease duration cannot be changed - using renew or change. For backwards compatibility, the default is - 60, and the value is only used on an acquire operation. - x_ms_lease_break_period: - Optional. For a break operation, this is the proposed duration of - seconds that the lease should continue before it is broken, between - 0 and 60 seconds. This break period is only used if it is shorter - than the time remaining on the lease. If longer, the time remaining - on the lease is used. A new lease will not be available before the - break period has expired, but the lease may be held for longer than - the break period. If this header does not appear with a break - operation, a fixed-duration lease breaks after the remaining lease - period elapses, and an infinite lease breaks immediately. - x_ms_proposed_lease_id: - Optional for acquire, required for change. Proposed lease ID, in a - GUID string format. - ''' - _validate_not_none('container_name', container_name) - _validate_not_none('blob_name', blob_name) - _validate_not_none('x_ms_lease_action', x_ms_lease_action) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + \ - _str(container_name) + '/' + _str(blob_name) + '?comp=lease' - request.headers = [ - ('x-ms-lease-id', _str_or_none(x_ms_lease_id)), - ('x-ms-lease-action', _str_or_none(x_ms_lease_action)), - ('x-ms-lease-duration', _str_or_none(x_ms_lease_duration\ - if x_ms_lease_action == 'acquire' else None)), - ('x-ms-lease-break-period', _str_or_none(x_ms_lease_break_period)), - ('x-ms-proposed-lease-id', _str_or_none(x_ms_proposed_lease_id)), - ] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - response = self._perform_request(request) - - return _parse_response_for_dict_filter( - response, - filter=['x-ms-lease-id', 'x-ms-lease-time']) - - def snapshot_blob(self, container_name, blob_name, - x_ms_meta_name_values=None, if_modified_since=None, - if_unmodified_since=None, if_match=None, - if_none_match=None, x_ms_lease_id=None): - ''' - Creates a read-only snapshot of a blob. 
- - container_name: Name of existing container. - blob_name: Name of existing blob. - x_ms_meta_name_values: Optional. Dict containing name and value pairs. - if_modified_since: Optional. Datetime string. - if_unmodified_since: DateTime string. - if_match: - Optional. snapshot the blob only if its ETag value matches the - value specified. - if_none_match: Optional. An ETag value - x_ms_lease_id: Required if the blob has an active lease. - ''' - _validate_not_none('container_name', container_name) - _validate_not_none('blob_name', blob_name) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + \ - _str(container_name) + '/' + _str(blob_name) + '?comp=snapshot' - request.headers = [ - ('x-ms-meta-name-values', x_ms_meta_name_values), - ('If-Modified-Since', _str_or_none(if_modified_since)), - ('If-Unmodified-Since', _str_or_none(if_unmodified_since)), - ('If-Match', _str_or_none(if_match)), - ('If-None-Match', _str_or_none(if_none_match)), - ('x-ms-lease-id', _str_or_none(x_ms_lease_id)) - ] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - response = self._perform_request(request) - - return _parse_response_for_dict_filter( - response, - filter=['x-ms-snapshot', 'etag', 'last-modified']) - - def copy_blob(self, container_name, blob_name, x_ms_copy_source, - x_ms_meta_name_values=None, - x_ms_source_if_modified_since=None, - x_ms_source_if_unmodified_since=None, - x_ms_source_if_match=None, x_ms_source_if_none_match=None, - if_modified_since=None, if_unmodified_since=None, - if_match=None, if_none_match=None, x_ms_lease_id=None, - x_ms_source_lease_id=None): - ''' - Copies a blob to a destination within the storage account. - - container_name: Name of existing container. - blob_name: Name of existing blob. - x_ms_copy_source: - URL up to 2 KB in length that specifies a blob. A source blob in - the same account can be private, but a blob in another account - must be public or accept credentials included in this URL, such as - a Shared Access Signature. Examples: - https://myaccount.blob.core.windows.net/mycontainer/myblob - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=<DateTime> - x_ms_meta_name_values: Optional. Dict containing name and value pairs. - x_ms_source_if_modified_since: - Optional. An ETag value. Specify this conditional header to copy - the source blob only if its ETag matches the value specified. - x_ms_source_if_unmodified_since: - Optional. An ETag value. Specify this conditional header to copy - the blob only if its ETag does not match the value specified. - x_ms_source_if_match: - Optional. A DateTime value. Specify this conditional header to - copy the blob only if the source blob has been modified since the - specified date/time. - x_ms_source_if_none_match: - Optional. An ETag value. Specify this conditional header to copy - the source blob only if its ETag matches the value specified. - if_modified_since: Optional. Datetime string. - if_unmodified_since: DateTime string. - if_match: - Optional. Snapshot the blob only if its ETag value matches the - value specified. - if_none_match: Optional. An ETag value - x_ms_lease_id: Required if the blob has an active lease. - x_ms_source_lease_id: - Optional. Specify this to perform the Copy Blob operation only if - the lease ID given matches the active lease ID of the source blob. 
- ''' - _validate_not_none('container_name', container_name) - _validate_not_none('blob_name', blob_name) - _validate_not_none('x_ms_copy_source', x_ms_copy_source) - - if x_ms_copy_source.startswith('/'): - # Backwards compatibility for earlier versions of the SDK where - # the copy source can be in the following formats: - # - Blob in named container: - # /accountName/containerName/blobName - # - Snapshot in named container: - # /accountName/containerName/blobName?snapshot=<DateTime> - # - Blob in root container: - # /accountName/blobName - # - Snapshot in root container: - # /accountName/blobName?snapshot=<DateTime> - account, _, source =\ - x_ms_copy_source.partition('/')[2].partition('/') - x_ms_copy_source = self.protocol + '://' + \ - account + self.host_base + '/' + source - - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + _str(container_name) + '/' + _str(blob_name) + '' - request.headers = [ - ('x-ms-copy-source', _str_or_none(x_ms_copy_source)), - ('x-ms-meta-name-values', x_ms_meta_name_values), - ('x-ms-source-if-modified-since', - _str_or_none(x_ms_source_if_modified_since)), - ('x-ms-source-if-unmodified-since', - _str_or_none(x_ms_source_if_unmodified_since)), - ('x-ms-source-if-match', _str_or_none(x_ms_source_if_match)), - ('x-ms-source-if-none-match', - _str_or_none(x_ms_source_if_none_match)), - ('If-Modified-Since', _str_or_none(if_modified_since)), - ('If-Unmodified-Since', _str_or_none(if_unmodified_since)), - ('If-Match', _str_or_none(if_match)), - ('If-None-Match', _str_or_none(if_none_match)), - ('x-ms-lease-id', _str_or_none(x_ms_lease_id)), - ('x-ms-source-lease-id', _str_or_none(x_ms_source_lease_id)) - ] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - response = self._perform_request(request) - - return _parse_response_for_dict(response) - - def abort_copy_blob(self, container_name, blob_name, x_ms_copy_id, - x_ms_lease_id=None): - ''' - Aborts a pending copy_blob operation, and leaves a destination blob - with zero length and full metadata. - - container_name: Name of destination container. - blob_name: Name of destination blob. - x_ms_copy_id: - Copy identifier provided in the x-ms-copy-id of the original - copy_blob operation. - x_ms_lease_id: - Required if the destination blob has an active infinite lease. - ''' - _validate_not_none('container_name', container_name) - _validate_not_none('blob_name', blob_name) - _validate_not_none('x_ms_copy_id', x_ms_copy_id) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + _str(container_name) + '/' + \ - _str(blob_name) + '?comp=copy©id=' + \ - _str(x_ms_copy_id) - request.headers = [ - ('x-ms-lease-id', _str_or_none(x_ms_lease_id)), - ('x-ms-copy-action', 'abort'), - ] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - self._perform_request(request) - - def delete_blob(self, container_name, blob_name, snapshot=None, - x_ms_lease_id=None): - ''' - Marks the specified blob or snapshot for deletion. The blob is later - deleted during garbage collection. - - To mark a specific snapshot for deletion provide the date/time of the - snapshot via the snapshot parameter. 
- - container_name: Name of existing container. - blob_name: Name of existing blob. - snapshot: - Optional. The snapshot parameter is an opaque DateTime value that, - when present, specifies the blob snapshot to delete. - x_ms_lease_id: Required if the blob has an active lease. - ''' - _validate_not_none('container_name', container_name) - _validate_not_none('blob_name', blob_name) - request = HTTPRequest() - request.method = 'DELETE' - request.host = self._get_host() - request.path = '/' + _str(container_name) + '/' + _str(blob_name) + '' - request.headers = [('x-ms-lease-id', _str_or_none(x_ms_lease_id))] - request.query = [('snapshot', _str_or_none(snapshot))] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - self._perform_request(request) - - def put_block(self, container_name, blob_name, block, blockid, - content_md5=None, x_ms_lease_id=None): - ''' - Creates a new block to be committed as part of a blob. - - container_name: Name of existing container. - blob_name: Name of existing blob. - block: Content of the block. - blockid: - Required. A value that identifies the block. The string must be - less than or equal to 64 bytes in size. - content_md5: - Optional. An MD5 hash of the block content. This hash is used to - verify the integrity of the blob during transport. When this - header is specified, the storage service checks the hash that has - arrived with the one that was sent. - x_ms_lease_id: Required if the blob has an active lease. - ''' - _validate_not_none('container_name', container_name) - _validate_not_none('blob_name', blob_name) - _validate_not_none('block', block) - _validate_not_none('blockid', blockid) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + \ - _str(container_name) + '/' + _str(blob_name) + '?comp=block' - request.headers = [ - ('Content-MD5', _str_or_none(content_md5)), - ('x-ms-lease-id', _str_or_none(x_ms_lease_id)) - ] - request.query = [('blockid', _encode_base64(_str_or_none(blockid)))] - request.body = _get_request_body_bytes_only('block', block) - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - self._perform_request(request) - - def put_block_list(self, container_name, blob_name, block_list, - content_md5=None, x_ms_blob_cache_control=None, - x_ms_blob_content_type=None, - x_ms_blob_content_encoding=None, - x_ms_blob_content_language=None, - x_ms_blob_content_md5=None, x_ms_meta_name_values=None, - x_ms_lease_id=None): - ''' - Writes a blob by specifying the list of block IDs that make up the - blob. In order to be written as part of a blob, a block must have been - successfully written to the server in a prior Put Block (REST API) - operation. - - container_name: Name of existing container. - blob_name: Name of existing blob. - block_list: A str list containing the block ids. - content_md5: - Optional. An MD5 hash of the block content. This hash is used to - verify the integrity of the blob during transport. When this header - is specified, the storage service checks the hash that has arrived - with the one that was sent. - x_ms_blob_cache_control: - Optional. Sets the blob's cache control. If specified, this - property is stored with the blob and returned with a read request. 
- x_ms_blob_content_type: - Optional. Sets the blob's content type. If specified, this property - is stored with the blob and returned with a read request. - x_ms_blob_content_encoding: - Optional. Sets the blob's content encoding. If specified, this - property is stored with the blob and returned with a read request. - x_ms_blob_content_language: - Optional. Set the blob's content language. If specified, this - property is stored with the blob and returned with a read request. - x_ms_blob_content_md5: - Optional. An MD5 hash of the blob content. Note that this hash is - not validated, as the hashes for the individual blocks were - validated when each was uploaded. - x_ms_meta_name_values: Optional. Dict containing name and value pairs. - x_ms_lease_id: Required if the blob has an active lease. - ''' - _validate_not_none('container_name', container_name) - _validate_not_none('blob_name', blob_name) - _validate_not_none('block_list', block_list) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + \ - _str(container_name) + '/' + _str(blob_name) + '?comp=blocklist' - request.headers = [ - ('Content-MD5', _str_or_none(content_md5)), - ('x-ms-blob-cache-control', _str_or_none(x_ms_blob_cache_control)), - ('x-ms-blob-content-type', _str_or_none(x_ms_blob_content_type)), - ('x-ms-blob-content-encoding', - _str_or_none(x_ms_blob_content_encoding)), - ('x-ms-blob-content-language', - _str_or_none(x_ms_blob_content_language)), - ('x-ms-blob-content-md5', _str_or_none(x_ms_blob_content_md5)), - ('x-ms-meta-name-values', x_ms_meta_name_values), - ('x-ms-lease-id', _str_or_none(x_ms_lease_id)) - ] - request.body = _get_request_body( - _convert_block_list_to_xml(block_list)) - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - self._perform_request(request) - - def get_block_list(self, container_name, blob_name, snapshot=None, - blocklisttype=None, x_ms_lease_id=None): - ''' - Retrieves the list of blocks that have been uploaded as part of a - block blob. - - container_name: Name of existing container. - blob_name: Name of existing blob. - snapshot: - Optional. Datetime to determine the time to retrieve the blocks. - blocklisttype: - Specifies whether to return the list of committed blocks, the list - of uncommitted blocks, or both lists together. Valid values are: - committed, uncommitted, or all. - x_ms_lease_id: Required if the blob has an active lease. 
-        '''
-        _validate_not_none('container_name', container_name)
-        _validate_not_none('blob_name', blob_name)
-        request = HTTPRequest()
-        request.method = 'GET'
-        request.host = self._get_host()
-        request.path = '/' + \
-            _str(container_name) + '/' + _str(blob_name) + '?comp=blocklist'
-        request.headers = [('x-ms-lease-id', _str_or_none(x_ms_lease_id))]
-        request.query = [
-            ('snapshot', _str_or_none(snapshot)),
-            ('blocklisttype', _str_or_none(blocklisttype))
-        ]
-        request.path, request.query = _update_request_uri_query_local_storage(
-            request, self.use_local_storage)
-        request.headers = _update_storage_blob_header(
-            request, self.account_name, self.account_key)
-        response = self._perform_request(request)
-
-        return _convert_response_to_block_list(response)
-
-    def put_page(self, container_name, blob_name, page, x_ms_range,
-                 x_ms_page_write, timeout=None, content_md5=None,
-                 x_ms_lease_id=None, x_ms_if_sequence_number_lte=None,
-                 x_ms_if_sequence_number_lt=None,
-                 x_ms_if_sequence_number_eq=None,
-                 if_modified_since=None, if_unmodified_since=None,
-                 if_match=None, if_none_match=None):
-        '''
-        Writes a range of pages to a page blob.
-
-        container_name: Name of existing container.
-        blob_name: Name of existing blob.
-        page: Content of the page.
-        x_ms_range:
-            Required. Specifies the range of bytes to be written as a page.
-            Both the start and end of the range must be specified. Must be in
-            format: bytes=startByte-endByte. Given that pages must be aligned
-            with 512-byte boundaries, the start offset must be a modulus of
-            512 and the end offset must be a modulus of 512-1. Examples of
-            valid byte ranges are 0-511, 512-1023, etc.
-        x_ms_page_write:
-            Required. You may specify one of the following options:
-                update (lower case):
-                    Writes the bytes specified by the request body into the
-                    specified range. The Range and Content-Length headers must
-                    match to perform the update.
-                clear (lower case):
-                    Clears the specified range and releases the space used in
-                    storage for that range. To clear a range, set the
-                    Content-Length header to zero, and the Range header to a
-                    value that indicates the range to clear, up to maximum
-                    blob size.
-        timeout: the timeout parameter is expressed in seconds.
-        content_md5:
-            Optional. An MD5 hash of the page content. This hash is used to
-            verify the integrity of the page during transport. When this header
-            is specified, the storage service compares the hash of the content
-            that has arrived with the header value that was sent. If the two
-            hashes do not match, the operation will fail with error code 400
-            (Bad Request).
-        x_ms_lease_id: Required if the blob has an active lease.
-        x_ms_if_sequence_number_lte:
-            Optional. If the blob's sequence number is less than or equal to
-            the specified value, the request proceeds; otherwise it fails.
-        x_ms_if_sequence_number_lt:
-            Optional. If the blob's sequence number is less than the specified
-            value, the request proceeds; otherwise it fails.
-        x_ms_if_sequence_number_eq:
-            Optional. If the blob's sequence number is equal to the specified
-            value, the request proceeds; otherwise it fails.
-        if_modified_since:
-            Optional. A DateTime value. Specify this conditional header to
-            write the page only if the blob has been modified since the
-            specified date/time. If the blob has not been modified, the Blob
-            service fails.
-        if_unmodified_since:
-            Optional. A DateTime value. Specify this conditional header to
-            write the page only if the blob has not been modified since the
-            specified date/time.
If the blob has been modified, the Blob - service fails. - if_match: - Optional. An ETag value. Specify an ETag value for this conditional - header to write the page only if the blob's ETag value matches the - value specified. If the values do not match, the Blob service fails. - if_none_match: - Optional. An ETag value. Specify an ETag value for this conditional - header to write the page only if the blob's ETag value does not - match the value specified. If the values are identical, the Blob - service fails. - ''' - _validate_not_none('container_name', container_name) - _validate_not_none('blob_name', blob_name) - _validate_not_none('page', page) - _validate_not_none('x_ms_range', x_ms_range) - _validate_not_none('x_ms_page_write', x_ms_page_write) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + \ - _str(container_name) + '/' + _str(blob_name) + '?comp=page' - request.headers = [ - ('x-ms-range', _str_or_none(x_ms_range)), - ('Content-MD5', _str_or_none(content_md5)), - ('x-ms-page-write', _str_or_none(x_ms_page_write)), - ('x-ms-lease-id', _str_or_none(x_ms_lease_id)), - ('x-ms-if-sequence-number-le', - _str_or_none(x_ms_if_sequence_number_lte)), - ('x-ms-if-sequence-number-lt', - _str_or_none(x_ms_if_sequence_number_lt)), - ('x-ms-if-sequence-number-eq', - _str_or_none(x_ms_if_sequence_number_eq)), - ('If-Modified-Since', _str_or_none(if_modified_since)), - ('If-Unmodified-Since', _str_or_none(if_unmodified_since)), - ('If-Match', _str_or_none(if_match)), - ('If-None-Match', _str_or_none(if_none_match)) - ] - request.query = [('timeout', _int_or_none(timeout))] - request.body = _get_request_body_bytes_only('page', page) - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - self._perform_request(request) - - def get_page_ranges(self, container_name, blob_name, snapshot=None, - range=None, x_ms_range=None, x_ms_lease_id=None): - ''' - Retrieves the page ranges for a blob. - - container_name: Name of existing container. - blob_name: Name of existing blob. - snapshot: - Optional. The snapshot parameter is an opaque DateTime value that, - when present, specifies the blob snapshot to retrieve information - from. - range: - Optional. Specifies the range of bytes over which to list ranges, - inclusively. If omitted, then all ranges for the blob are returned. - x_ms_range: - Optional. Specifies the range of bytes to be written as a page. - Both the start and end of the range must be specified. Must be in - format: bytes=startByte-endByte. Given that pages must be aligned - with 512-byte boundaries, the start offset must be a modulus of - 512 and the end offset must be a modulus of 512-1. Examples of - valid byte ranges are 0-511, 512-1023, etc. - x_ms_lease_id: Required if the blob has an active lease. 
- ''' - _validate_not_none('container_name', container_name) - _validate_not_none('blob_name', blob_name) - request = HTTPRequest() - request.method = 'GET' - request.host = self._get_host() - request.path = '/' + \ - _str(container_name) + '/' + _str(blob_name) + '?comp=pagelist' - request.headers = [ - ('Range', _str_or_none(range)), - ('x-ms-range', _str_or_none(x_ms_range)), - ('x-ms-lease-id', _str_or_none(x_ms_lease_id)) - ] - request.query = [('snapshot', _str_or_none(snapshot))] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_blob_header( - request, self.account_name, self.account_key) - response = self._perform_request(request) - - return _parse_simple_list(response, PageList, PageRange, "page_ranges") +#------------------------------------------------------------------------- +# Copyright (c) Microsoft. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#-------------------------------------------------------------------------- +from azure import ( + WindowsAzureError, + BLOB_SERVICE_HOST_BASE, + DEV_BLOB_HOST, + _ERROR_VALUE_NEGATIVE, + _ERROR_PAGE_BLOB_SIZE_ALIGNMENT, + _convert_class_to_xml, + _dont_fail_not_exist, + _dont_fail_on_exist, + _encode_base64, + _get_request_body, + _get_request_body_bytes_only, + _int_or_none, + _parse_enum_results_list, + _parse_response, + _parse_response_for_dict, + _parse_response_for_dict_filter, + _parse_response_for_dict_prefix, + _parse_simple_list, + _str, + _str_or_none, + _update_request_uri_query_local_storage, + _validate_type_bytes, + _validate_not_none, + ) +from azure.http import HTTPRequest +from azure.storage import ( + Container, + ContainerEnumResults, + PageList, + PageRange, + SignedIdentifiers, + StorageServiceProperties, + _convert_block_list_to_xml, + _convert_response_to_block_list, + _create_blob_result, + _parse_blob_enum_results_list, + _update_storage_blob_header, + ) +from azure.storage.storageclient import _StorageClient +from os import path +import sys +if sys.version_info >= (3,): + from io import BytesIO +else: + from cStringIO import StringIO as BytesIO + +# Keep this value sync with _ERROR_PAGE_BLOB_SIZE_ALIGNMENT +_PAGE_SIZE = 512 + + +class BlobService(_StorageClient): + + ''' + This is the main class managing Blob resources. + ''' + + def __init__(self, account_name=None, account_key=None, protocol='https', + host_base=BLOB_SERVICE_HOST_BASE, dev_host=DEV_BLOB_HOST): + ''' + account_name: your storage account name, required for all operations. + account_key: your storage account key, required for all operations. + protocol: Optional. Protocol. Defaults to https. + host_base: + Optional. Live host base url. Defaults to Azure url. Override this + for on-premise. + dev_host: Optional. Dev host url. Defaults to localhost. 
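+
+        Usage sketch (an editorial illustration, not part of the upstream
+        docstring; the account name and key values below are placeholders):
+            from azure.storage.blobservice import BlobService
+            blob_service = BlobService(account_name='myaccount',
+                                       account_key='bXlrZXk=')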
+ ''' + self._BLOB_MAX_DATA_SIZE = 64 * 1024 * 1024 + self._BLOB_MAX_CHUNK_DATA_SIZE = 4 * 1024 * 1024 + super(BlobService, self).__init__( + account_name, account_key, protocol, host_base, dev_host) + + def make_blob_url(self, container_name, blob_name, account_name=None, + protocol=None, host_base=None): + ''' + Creates the url to access a blob. + + container_name: Name of container. + blob_name: Name of blob. + account_name: + Name of the storage account. If not specified, uses the account + specified when BlobService was initialized. + protocol: + Protocol to use: 'http' or 'https'. If not specified, uses the + protocol specified when BlobService was initialized. + host_base: + Live host base url. If not specified, uses the host base specified + when BlobService was initialized. + ''' + + if not account_name: + account_name = self.account_name + if not protocol: + protocol = self.protocol + if not host_base: + host_base = self.host_base + + return '{0}://{1}{2}/{3}/{4}'.format(protocol, + account_name, + host_base, + container_name, + blob_name) + + def list_containers(self, prefix=None, marker=None, maxresults=None, + include=None): + ''' + The List Containers operation returns a list of the containers under + the specified account. + + prefix: + Optional. Filters the results to return only containers whose names + begin with the specified prefix. + marker: + Optional. A string value that identifies the portion of the list to + be returned with the next list operation. + maxresults: + Optional. Specifies the maximum number of containers to return. + include: + Optional. Include this parameter to specify that the container's + metadata be returned as part of the response body. set this + parameter to string 'metadata' to get container's metadata. + ''' + request = HTTPRequest() + request.method = 'GET' + request.host = self._get_host() + request.path = '/?comp=list' + request.query = [ + ('prefix', _str_or_none(prefix)), + ('marker', _str_or_none(marker)), + ('maxresults', _int_or_none(maxresults)), + ('include', _str_or_none(include)) + ] + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_blob_header( + request, self.account_name, self.account_key) + response = self._perform_request(request) + + return _parse_enum_results_list(response, + ContainerEnumResults, + "Containers", + Container) + + def create_container(self, container_name, x_ms_meta_name_values=None, + x_ms_blob_public_access=None, fail_on_exist=False): + ''' + Creates a new container under the specified account. If the container + with the same name already exists, the operation fails. + + container_name: Name of container to create. + x_ms_meta_name_values: + Optional. A dict with name_value pairs to associate with the + container as metadata. Example:{'Category':'test'} + x_ms_blob_public_access: + Optional. Possible values include: container, blob + fail_on_exist: + specify whether to throw an exception when the container exists. 
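+
+        Usage sketch (an editorial illustration; 'mycontainer' is a
+        placeholder and blob_service is an already-constructed BlobService):
+            created = blob_service.create_container(
+                'mycontainer',
+                x_ms_meta_name_values={'category': 'test'},
+                fail_on_exist=False)
+            # created is True on success, False if the container already
+            # existed, mirroring the _dont_fail_on_exist handling below.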
+ ''' + _validate_not_none('container_name', container_name) + request = HTTPRequest() + request.method = 'PUT' + request.host = self._get_host() + request.path = '/' + _str(container_name) + '?restype=container' + request.headers = [ + ('x-ms-meta-name-values', x_ms_meta_name_values), + ('x-ms-blob-public-access', _str_or_none(x_ms_blob_public_access)) + ] + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_blob_header( + request, self.account_name, self.account_key) + if not fail_on_exist: + try: + self._perform_request(request) + return True + except WindowsAzureError as ex: + _dont_fail_on_exist(ex) + return False + else: + self._perform_request(request) + return True + + def get_container_properties(self, container_name, x_ms_lease_id=None): + ''' + Returns all user-defined metadata and system properties for the + specified container. + + container_name: Name of existing container. + x_ms_lease_id: + If specified, get_container_properties only succeeds if the + container's lease is active and matches this ID. + ''' + _validate_not_none('container_name', container_name) + request = HTTPRequest() + request.method = 'GET' + request.host = self._get_host() + request.path = '/' + _str(container_name) + '?restype=container' + request.headers = [('x-ms-lease-id', _str_or_none(x_ms_lease_id))] + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_blob_header( + request, self.account_name, self.account_key) + response = self._perform_request(request) + + return _parse_response_for_dict(response) + + def get_container_metadata(self, container_name, x_ms_lease_id=None): + ''' + Returns all user-defined metadata for the specified container. The + metadata will be in returned dictionary['x-ms-meta-(name)']. + + container_name: Name of existing container. + x_ms_lease_id: + If specified, get_container_metadata only succeeds if the + container's lease is active and matches this ID. + ''' + _validate_not_none('container_name', container_name) + request = HTTPRequest() + request.method = 'GET' + request.host = self._get_host() + request.path = '/' + \ + _str(container_name) + '?restype=container&comp=metadata' + request.headers = [('x-ms-lease-id', _str_or_none(x_ms_lease_id))] + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_blob_header( + request, self.account_name, self.account_key) + response = self._perform_request(request) + + return _parse_response_for_dict_prefix(response, prefixes=['x-ms-meta']) + + def set_container_metadata(self, container_name, + x_ms_meta_name_values=None, x_ms_lease_id=None): + ''' + Sets one or more user-defined name-value pairs for the specified + container. + + container_name: Name of existing container. + x_ms_meta_name_values: + A dict containing name, value for metadata. + Example: {'category':'test'} + x_ms_lease_id: + If specified, set_container_metadata only succeeds if the + container's lease is active and matches this ID. 
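+
+        Usage sketch (an editorial illustration; names are placeholders):
+            blob_service.set_container_metadata(
+                'mycontainer', x_ms_meta_name_values={'category': 'test'})
+            meta = blob_service.get_container_metadata('mycontainer')
+            # per get_container_metadata above, meta['x-ms-meta-category']
+            # would then be 'test'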
+ ''' + _validate_not_none('container_name', container_name) + request = HTTPRequest() + request.method = 'PUT' + request.host = self._get_host() + request.path = '/' + \ + _str(container_name) + '?restype=container&comp=metadata' + request.headers = [ + ('x-ms-meta-name-values', x_ms_meta_name_values), + ('x-ms-lease-id', _str_or_none(x_ms_lease_id)), + ] + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_blob_header( + request, self.account_name, self.account_key) + self._perform_request(request) + + def get_container_acl(self, container_name, x_ms_lease_id=None): + ''' + Gets the permissions for the specified container. + + container_name: Name of existing container. + x_ms_lease_id: + If specified, get_container_acl only succeeds if the + container's lease is active and matches this ID. + ''' + _validate_not_none('container_name', container_name) + request = HTTPRequest() + request.method = 'GET' + request.host = self._get_host() + request.path = '/' + \ + _str(container_name) + '?restype=container&comp=acl' + request.headers = [('x-ms-lease-id', _str_or_none(x_ms_lease_id))] + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_blob_header( + request, self.account_name, self.account_key) + response = self._perform_request(request) + + return _parse_response(response, SignedIdentifiers) + + def set_container_acl(self, container_name, signed_identifiers=None, + x_ms_blob_public_access=None, x_ms_lease_id=None): + ''' + Sets the permissions for the specified container. + + container_name: Name of existing container. + signed_identifiers: SignedIdentifers instance + x_ms_blob_public_access: + Optional. Possible values include: container, blob + x_ms_lease_id: + If specified, set_container_acl only succeeds if the + container's lease is active and matches this ID. + ''' + _validate_not_none('container_name', container_name) + request = HTTPRequest() + request.method = 'PUT' + request.host = self._get_host() + request.path = '/' + \ + _str(container_name) + '?restype=container&comp=acl' + request.headers = [ + ('x-ms-blob-public-access', _str_or_none(x_ms_blob_public_access)), + ('x-ms-lease-id', _str_or_none(x_ms_lease_id)), + ] + request.body = _get_request_body( + _convert_class_to_xml(signed_identifiers)) + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_blob_header( + request, self.account_name, self.account_key) + self._perform_request(request) + + def delete_container(self, container_name, fail_not_exist=False, + x_ms_lease_id=None): + ''' + Marks the specified container for deletion. + + container_name: Name of container to delete. + fail_not_exist: + Specify whether to throw an exception when the container doesn't + exist. + x_ms_lease_id: Required if the container has an active lease. 
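+
+        Usage sketch (an editorial illustration; the name is a placeholder):
+            deleted = blob_service.delete_container('mycontainer')
+            # True on success; with the default fail_not_exist=False a
+            # missing container returns False rather than raising.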
+ ''' + _validate_not_none('container_name', container_name) + request = HTTPRequest() + request.method = 'DELETE' + request.host = self._get_host() + request.path = '/' + _str(container_name) + '?restype=container' + request.headers = [('x-ms-lease-id', _str_or_none(x_ms_lease_id))] + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_blob_header( + request, self.account_name, self.account_key) + if not fail_not_exist: + try: + self._perform_request(request) + return True + except WindowsAzureError as ex: + _dont_fail_not_exist(ex) + return False + else: + self._perform_request(request) + return True + + def lease_container(self, container_name, x_ms_lease_action, + x_ms_lease_id=None, x_ms_lease_duration=60, + x_ms_lease_break_period=None, + x_ms_proposed_lease_id=None): + ''' + Establishes and manages a lock on a container for delete operations. + The lock duration can be 15 to 60 seconds, or can be infinite. + + container_name: Name of existing container. + x_ms_lease_action: + Required. Possible values: acquire|renew|release|break|change + x_ms_lease_id: Required if the container has an active lease. + x_ms_lease_duration: + Specifies the duration of the lease, in seconds, or negative one + (-1) for a lease that never expires. A non-infinite lease can be + between 15 and 60 seconds. A lease duration cannot be changed + using renew or change. For backwards compatibility, the default is + 60, and the value is only used on an acquire operation. + x_ms_lease_break_period: + Optional. For a break operation, this is the proposed duration of + seconds that the lease should continue before it is broken, between + 0 and 60 seconds. This break period is only used if it is shorter + than the time remaining on the lease. If longer, the time remaining + on the lease is used. A new lease will not be available before the + break period has expired, but the lease may be held for longer than + the break period. If this header does not appear with a break + operation, a fixed-duration lease breaks after the remaining lease + period elapses, and an infinite lease breaks immediately. + x_ms_proposed_lease_id: + Optional for acquire, required for change. Proposed lease ID, in a + GUID string format. + ''' + _validate_not_none('container_name', container_name) + _validate_not_none('x_ms_lease_action', x_ms_lease_action) + request = HTTPRequest() + request.method = 'PUT' + request.host = self._get_host() + request.path = '/' + \ + _str(container_name) + '?restype=container&comp=lease' + request.headers = [ + ('x-ms-lease-id', _str_or_none(x_ms_lease_id)), + ('x-ms-lease-action', _str_or_none(x_ms_lease_action)), + ('x-ms-lease-duration', + _str_or_none( + x_ms_lease_duration if x_ms_lease_action == 'acquire'\ + else None)), + ('x-ms-lease-break-period', _str_or_none(x_ms_lease_break_period)), + ('x-ms-proposed-lease-id', _str_or_none(x_ms_proposed_lease_id)), + ] + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_blob_header( + request, self.account_name, self.account_key) + response = self._perform_request(request) + + return _parse_response_for_dict_filter( + response, + filter=['x-ms-lease-id', 'x-ms-lease-time']) + + def list_blobs(self, container_name, prefix=None, marker=None, + maxresults=None, include=None, delimiter=None): + ''' + Returns the list of blobs under the specified container. 
+ + container_name: Name of existing container. + prefix: + Optional. Filters the results to return only blobs whose names + begin with the specified prefix. + marker: + Optional. A string value that identifies the portion of the list + to be returned with the next list operation. The operation returns + a marker value within the response body if the list returned was + not complete. The marker value may then be used in a subsequent + call to request the next set of list items. The marker value is + opaque to the client. + maxresults: + Optional. Specifies the maximum number of blobs to return, + including all BlobPrefix elements. If the request does not specify + maxresults or specifies a value greater than 5,000, the server will + return up to 5,000 items. Setting maxresults to a value less than + or equal to zero results in error response code 400 (Bad Request). + include: + Optional. Specifies one or more datasets to include in the + response. To specify more than one of these options on the URI, + you must separate each option with a comma. Valid values are: + snapshots: + Specifies that snapshots should be included in the + enumeration. Snapshots are listed from oldest to newest in + the response. + metadata: + Specifies that blob metadata be returned in the response. + uncommittedblobs: + Specifies that blobs for which blocks have been uploaded, + but which have not been committed using Put Block List + (REST API), be included in the response. + copy: + Version 2012-02-12 and newer. Specifies that metadata + related to any current or previous Copy Blob operation + should be included in the response. + delimiter: + Optional. When the request includes this parameter, the operation + returns a BlobPrefix element in the response body that acts as a + placeholder for all blobs whose names begin with the same + substring up to the appearance of the delimiter character. The + delimiter may be a single character or a string. + ''' + _validate_not_none('container_name', container_name) + request = HTTPRequest() + request.method = 'GET' + request.host = self._get_host() + request.path = '/' + \ + _str(container_name) + '?restype=container&comp=list' + request.query = [ + ('prefix', _str_or_none(prefix)), + ('delimiter', _str_or_none(delimiter)), + ('marker', _str_or_none(marker)), + ('maxresults', _int_or_none(maxresults)), + ('include', _str_or_none(include)) + ] + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_blob_header( + request, self.account_name, self.account_key) + response = self._perform_request(request) + + return _parse_blob_enum_results_list(response) + + def set_blob_service_properties(self, storage_service_properties, + timeout=None): + ''' + Sets the properties of a storage account's Blob service, including + Windows Azure Storage Analytics. You can also use this operation to + set the default request version for all incoming requests that do not + have a version specified. + + storage_service_properties: a StorageServiceProperties object. + timeout: Optional. The timeout parameter is expressed in seconds. 
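+
+        Usage sketch (an editorial illustration; it round-trips the current
+        settings rather than guessing at StorageServiceProperties fields):
+            props = blob_service.get_blob_service_properties()
+            blob_service.set_blob_service_properties(props, timeout=30)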
+ ''' + _validate_not_none('storage_service_properties', + storage_service_properties) + request = HTTPRequest() + request.method = 'PUT' + request.host = self._get_host() + request.path = '/?restype=service&comp=properties' + request.query = [('timeout', _int_or_none(timeout))] + request.body = _get_request_body( + _convert_class_to_xml(storage_service_properties)) + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_blob_header( + request, self.account_name, self.account_key) + self._perform_request(request) + + def get_blob_service_properties(self, timeout=None): + ''' + Gets the properties of a storage account's Blob service, including + Windows Azure Storage Analytics. + + timeout: Optional. The timeout parameter is expressed in seconds. + ''' + request = HTTPRequest() + request.method = 'GET' + request.host = self._get_host() + request.path = '/?restype=service&comp=properties' + request.query = [('timeout', _int_or_none(timeout))] + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_blob_header( + request, self.account_name, self.account_key) + response = self._perform_request(request) + + return _parse_response(response, StorageServiceProperties) + + def get_blob_properties(self, container_name, blob_name, + x_ms_lease_id=None): + ''' + Returns all user-defined metadata, standard HTTP properties, and + system properties for the blob. + + container_name: Name of existing container. + blob_name: Name of existing blob. + x_ms_lease_id: Required if the blob has an active lease. + ''' + _validate_not_none('container_name', container_name) + _validate_not_none('blob_name', blob_name) + request = HTTPRequest() + request.method = 'HEAD' + request.host = self._get_host() + request.path = '/' + _str(container_name) + '/' + _str(blob_name) + '' + request.headers = [('x-ms-lease-id', _str_or_none(x_ms_lease_id))] + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_blob_header( + request, self.account_name, self.account_key) + + response = self._perform_request(request) + + return _parse_response_for_dict(response) + + def set_blob_properties(self, container_name, blob_name, + x_ms_blob_cache_control=None, + x_ms_blob_content_type=None, + x_ms_blob_content_md5=None, + x_ms_blob_content_encoding=None, + x_ms_blob_content_language=None, + x_ms_lease_id=None): + ''' + Sets system properties on the blob. + + container_name: Name of existing container. + blob_name: Name of existing blob. + x_ms_blob_cache_control: + Optional. Modifies the cache control string for the blob. + x_ms_blob_content_type: Optional. Sets the blob's content type. + x_ms_blob_content_md5: Optional. Sets the blob's MD5 hash. + x_ms_blob_content_encoding: Optional. Sets the blob's content encoding. + x_ms_blob_content_language: Optional. Sets the blob's content language. + x_ms_lease_id: Required if the blob has an active lease. 
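+
+        Usage sketch (an editorial illustration; container and blob names
+        are placeholders):
+            blob_service.set_blob_properties(
+                'mycontainer', 'myblob',
+                x_ms_blob_content_type='application/octet-stream',
+                x_ms_blob_cache_control='no-cache')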
+ ''' + _validate_not_none('container_name', container_name) + _validate_not_none('blob_name', blob_name) + request = HTTPRequest() + request.method = 'PUT' + request.host = self._get_host() + request.path = '/' + \ + _str(container_name) + '/' + _str(blob_name) + '?comp=properties' + request.headers = [ + ('x-ms-blob-cache-control', _str_or_none(x_ms_blob_cache_control)), + ('x-ms-blob-content-type', _str_or_none(x_ms_blob_content_type)), + ('x-ms-blob-content-md5', _str_or_none(x_ms_blob_content_md5)), + ('x-ms-blob-content-encoding', + _str_or_none(x_ms_blob_content_encoding)), + ('x-ms-blob-content-language', + _str_or_none(x_ms_blob_content_language)), + ('x-ms-lease-id', _str_or_none(x_ms_lease_id)) + ] + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_blob_header( + request, self.account_name, self.account_key) + self._perform_request(request) + + def put_blob(self, container_name, blob_name, blob, x_ms_blob_type, + content_encoding=None, content_language=None, + content_md5=None, cache_control=None, + x_ms_blob_content_type=None, x_ms_blob_content_encoding=None, + x_ms_blob_content_language=None, x_ms_blob_content_md5=None, + x_ms_blob_cache_control=None, x_ms_meta_name_values=None, + x_ms_lease_id=None, x_ms_blob_content_length=None, + x_ms_blob_sequence_number=None): + ''' + Creates a new block blob or page blob, or updates the content of an + existing block blob. + + See put_block_blob_from_* and put_page_blob_from_* for high level + functions that handle the creation and upload of large blobs with + automatic chunking and progress notifications. + + container_name: Name of existing container. + blob_name: Name of blob to create or update. + blob: + For BlockBlob: + Content of blob as bytes (size < 64MB). For larger size, you + must call put_block and put_block_list to set content of blob. + For PageBlob: + Use None and call put_page to set content of blob. + x_ms_blob_type: Required. Could be BlockBlob or PageBlob. + content_encoding: + Optional. Specifies which content encodings have been applied to + the blob. This value is returned to the client when the Get Blob + (REST API) operation is performed on the blob resource. The client + can use this value when returned to decode the blob content. + content_language: + Optional. Specifies the natural languages used by this resource. + content_md5: + Optional. An MD5 hash of the blob content. This hash is used to + verify the integrity of the blob during transport. When this header + is specified, the storage service checks the hash that has arrived + with the one that was sent. If the two hashes do not match, the + operation will fail with error code 400 (Bad Request). + cache_control: + Optional. The Blob service stores this value but does not use or + modify it. + x_ms_blob_content_type: Optional. Set the blob's content type. + x_ms_blob_content_encoding: Optional. Set the blob's content encoding. + x_ms_blob_content_language: Optional. Set the blob's content language. + x_ms_blob_content_md5: Optional. Set the blob's MD5 hash. + x_ms_blob_cache_control: Optional. Sets the blob's cache control. + x_ms_meta_name_values: A dict containing name, value for metadata. + x_ms_lease_id: Required if the blob has an active lease. + x_ms_blob_content_length: + Required for page blobs. This header specifies the maximum size + for the page blob, up to 1 TB. The page blob size must be aligned + to a 512-byte boundary. 
+ x_ms_blob_sequence_number: + Optional. Set for page blobs only. The sequence number is a + user-controlled value that you can use to track requests. The + value of the sequence number must be between 0 and 2^63 - 1. The + default value is 0. + ''' + _validate_not_none('container_name', container_name) + _validate_not_none('blob_name', blob_name) + _validate_not_none('x_ms_blob_type', x_ms_blob_type) + request = HTTPRequest() + request.method = 'PUT' + request.host = self._get_host() + request.path = '/' + _str(container_name) + '/' + _str(blob_name) + '' + request.headers = [ + ('x-ms-blob-type', _str_or_none(x_ms_blob_type)), + ('Content-Encoding', _str_or_none(content_encoding)), + ('Content-Language', _str_or_none(content_language)), + ('Content-MD5', _str_or_none(content_md5)), + ('Cache-Control', _str_or_none(cache_control)), + ('x-ms-blob-content-type', _str_or_none(x_ms_blob_content_type)), + ('x-ms-blob-content-encoding', + _str_or_none(x_ms_blob_content_encoding)), + ('x-ms-blob-content-language', + _str_or_none(x_ms_blob_content_language)), + ('x-ms-blob-content-md5', _str_or_none(x_ms_blob_content_md5)), + ('x-ms-blob-cache-control', _str_or_none(x_ms_blob_cache_control)), + ('x-ms-meta-name-values', x_ms_meta_name_values), + ('x-ms-lease-id', _str_or_none(x_ms_lease_id)), + ('x-ms-blob-content-length', + _str_or_none(x_ms_blob_content_length)), + ('x-ms-blob-sequence-number', + _str_or_none(x_ms_blob_sequence_number)) + ] + request.body = _get_request_body_bytes_only('blob', blob) + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_blob_header( + request, self.account_name, self.account_key) + self._perform_request(request) + + def put_block_blob_from_path(self, container_name, blob_name, file_path, + content_encoding=None, content_language=None, + content_md5=None, cache_control=None, + x_ms_blob_content_type=None, + x_ms_blob_content_encoding=None, + x_ms_blob_content_language=None, + x_ms_blob_content_md5=None, + x_ms_blob_cache_control=None, + x_ms_meta_name_values=None, + x_ms_lease_id=None, progress_callback=None): + ''' + Creates a new block blob from a file path, or updates the content of an + existing block blob, with automatic chunking and progress notifications. + + container_name: Name of existing container. + blob_name: Name of blob to create or update. + file_path: Path of the file to upload as the blob content. + content_encoding: + Optional. Specifies which content encodings have been applied to + the blob. This value is returned to the client when the Get Blob + (REST API) operation is performed on the blob resource. The client + can use this value when returned to decode the blob content. + content_language: + Optional. Specifies the natural languages used by this resource. + content_md5: + Optional. An MD5 hash of the blob content. This hash is used to + verify the integrity of the blob during transport. When this header + is specified, the storage service checks the hash that has arrived + with the one that was sent. If the two hashes do not match, the + operation will fail with error code 400 (Bad Request). + cache_control: + Optional. The Blob service stores this value but does not use or + modify it. + x_ms_blob_content_type: Optional. Set the blob's content type. + x_ms_blob_content_encoding: Optional. Set the blob's content encoding. + x_ms_blob_content_language: Optional. Set the blob's content language. + x_ms_blob_content_md5: Optional. 
Set the blob's MD5 hash. + x_ms_blob_cache_control: Optional. Sets the blob's cache control. + x_ms_meta_name_values: A dict containing name, value for metadata. + x_ms_lease_id: Required if the blob has an active lease. + progress_callback: + Callback for progress with signature function(current, total) where + current is the number of bytes transfered so far, and total is the + size of the blob, or None if the total size is unknown. + ''' + _validate_not_none('container_name', container_name) + _validate_not_none('blob_name', blob_name) + _validate_not_none('file_path', file_path) + + count = path.getsize(file_path) + with open(file_path, 'rb') as stream: + self.put_block_blob_from_file(container_name, + blob_name, + stream, + count, + content_encoding, + content_language, + content_md5, + cache_control, + x_ms_blob_content_type, + x_ms_blob_content_encoding, + x_ms_blob_content_language, + x_ms_blob_content_md5, + x_ms_blob_cache_control, + x_ms_meta_name_values, + x_ms_lease_id, + progress_callback) + + def put_block_blob_from_file(self, container_name, blob_name, stream, + count=None, content_encoding=None, + content_language=None, content_md5=None, + cache_control=None, + x_ms_blob_content_type=None, + x_ms_blob_content_encoding=None, + x_ms_blob_content_language=None, + x_ms_blob_content_md5=None, + x_ms_blob_cache_control=None, + x_ms_meta_name_values=None, + x_ms_lease_id=None, progress_callback=None): + ''' + Creates a new block blob from a file/stream, or updates the content of + an existing block blob, with automatic chunking and progress + notifications. + + container_name: Name of existing container. + blob_name: Name of blob to create or update. + stream: Opened file/stream to upload as the blob content. + count: + Number of bytes to read from the stream. This is optional, but + should be supplied for optimal performance. + content_encoding: + Optional. Specifies which content encodings have been applied to + the blob. This value is returned to the client when the Get Blob + (REST API) operation is performed on the blob resource. The client + can use this value when returned to decode the blob content. + content_language: + Optional. Specifies the natural languages used by this resource. + content_md5: + Optional. An MD5 hash of the blob content. This hash is used to + verify the integrity of the blob during transport. When this header + is specified, the storage service checks the hash that has arrived + with the one that was sent. If the two hashes do not match, the + operation will fail with error code 400 (Bad Request). + cache_control: + Optional. The Blob service stores this value but does not use or + modify it. + x_ms_blob_content_type: Optional. Set the blob's content type. + x_ms_blob_content_encoding: Optional. Set the blob's content encoding. + x_ms_blob_content_language: Optional. Set the blob's content language. + x_ms_blob_content_md5: Optional. Set the blob's MD5 hash. + x_ms_blob_cache_control: Optional. Sets the blob's cache control. + x_ms_meta_name_values: A dict containing name, value for metadata. + x_ms_lease_id: Required if the blob has an active lease. + progress_callback: + Callback for progress with signature function(current, total) where + current is the number of bytes transfered so far, and total is the + size of the blob, or None if the total size is unknown. 
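+
+        Example (an illustrative sketch; blob_service and the file name are
+        assumed). Uploads a local file with a simple progress printer:
+            import os
+
+            def report(current, total):
+                print('{0} of {1} bytes'.format(current, total))
+
+            with open('movie.mp4', 'rb') as stream:
+                blob_service.put_block_blob_from_file(
+                    'mycontainer', 'movie.mp4', stream,
+                    count=os.path.getsize('movie.mp4'),
+                    progress_callback=report)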
+ ''' + _validate_not_none('container_name', container_name) + _validate_not_none('blob_name', blob_name) + _validate_not_none('stream', stream) + + if count and count < self._BLOB_MAX_DATA_SIZE: + if progress_callback: + progress_callback(0, count) + + data = stream.read(count) + self.put_blob(container_name, + blob_name, + data, + 'BlockBlob', + content_encoding, + content_language, + content_md5, + cache_control, + x_ms_blob_content_type, + x_ms_blob_content_encoding, + x_ms_blob_content_language, + x_ms_blob_content_md5, + x_ms_blob_cache_control, + x_ms_meta_name_values, + x_ms_lease_id) + + if progress_callback: + progress_callback(count, count) + else: + if progress_callback: + progress_callback(0, count) + + self.put_blob(container_name, + blob_name, + None, + 'BlockBlob', + content_encoding, + content_language, + content_md5, + cache_control, + x_ms_blob_content_type, + x_ms_blob_content_encoding, + x_ms_blob_content_language, + x_ms_blob_content_md5, + x_ms_blob_cache_control, + x_ms_meta_name_values, + x_ms_lease_id) + + remain_bytes = count + block_ids = [] + block_index = 0 + index = 0 + while True: + request_count = self._BLOB_MAX_CHUNK_DATA_SIZE\ + if remain_bytes is None else min( + remain_bytes, + self._BLOB_MAX_CHUNK_DATA_SIZE) + data = stream.read(request_count) + if data: + length = len(data) + index += length + remain_bytes = remain_bytes - \ + length if remain_bytes else None + block_id = '{0:08d}'.format(block_index) + self.put_block(container_name, blob_name, + data, block_id, x_ms_lease_id=x_ms_lease_id) + block_ids.append(block_id) + block_index += 1 + if progress_callback: + progress_callback(index, count) + else: + break + + self.put_block_list(container_name, blob_name, block_ids, + content_md5, x_ms_blob_cache_control, + x_ms_blob_content_type, + x_ms_blob_content_encoding, + x_ms_blob_content_language, + x_ms_blob_content_md5, + x_ms_meta_name_values, + x_ms_lease_id) + + def put_block_blob_from_bytes(self, container_name, blob_name, blob, + index=0, count=None, content_encoding=None, + content_language=None, content_md5=None, + cache_control=None, + x_ms_blob_content_type=None, + x_ms_blob_content_encoding=None, + x_ms_blob_content_language=None, + x_ms_blob_content_md5=None, + x_ms_blob_cache_control=None, + x_ms_meta_name_values=None, + x_ms_lease_id=None, progress_callback=None): + ''' + Creates a new block blob from an array of bytes, or updates the content + of an existing block blob, with automatic chunking and progress + notifications. + + container_name: Name of existing container. + blob_name: Name of blob to create or update. + blob: Content of blob as an array of bytes. + index: Start index in the array of bytes. + count: + Number of bytes to upload. Set to None or negative value to upload + all bytes starting from index. + content_encoding: + Optional. Specifies which content encodings have been applied to + the blob. This value is returned to the client when the Get Blob + (REST API) operation is performed on the blob resource. The client + can use this value when returned to decode the blob content. + content_language: + Optional. Specifies the natural languages used by this resource. + content_md5: + Optional. An MD5 hash of the blob content. This hash is used to + verify the integrity of the blob during transport. When this header + is specified, the storage service checks the hash that has arrived + with the one that was sent. If the two hashes do not match, the + operation will fail with error code 400 (Bad Request). 
+ cache_control: + Optional. The Blob service stores this value but does not use or + modify it. + x_ms_blob_content_type: Optional. Set the blob's content type. + x_ms_blob_content_encoding: Optional. Set the blob's content encoding. + x_ms_blob_content_language: Optional. Set the blob's content language. + x_ms_blob_content_md5: Optional. Set the blob's MD5 hash. + x_ms_blob_cache_control: Optional. Sets the blob's cache control. + x_ms_meta_name_values: A dict containing name, value for metadata. + x_ms_lease_id: Required if the blob has an active lease. + progress_callback: + Callback for progress with signature function(current, total) where + current is the number of bytes transfered so far, and total is the + size of the blob, or None if the total size is unknown. + ''' + _validate_not_none('container_name', container_name) + _validate_not_none('blob_name', blob_name) + _validate_not_none('blob', blob) + _validate_not_none('index', index) + _validate_type_bytes('blob', blob) + + if index < 0: + raise TypeError(_ERROR_VALUE_NEGATIVE.format('index')) + + if count is None or count < 0: + count = len(blob) - index + + if count < self._BLOB_MAX_DATA_SIZE: + if progress_callback: + progress_callback(0, count) + + data = blob[index: index + count] + self.put_blob(container_name, + blob_name, + data, + 'BlockBlob', + content_encoding, + content_language, + content_md5, + cache_control, + x_ms_blob_content_type, + x_ms_blob_content_encoding, + x_ms_blob_content_language, + x_ms_blob_content_md5, + x_ms_blob_cache_control, + x_ms_meta_name_values, + x_ms_lease_id) + + if progress_callback: + progress_callback(count, count) + else: + stream = BytesIO(blob) + stream.seek(index) + + self.put_block_blob_from_file(container_name, + blob_name, + stream, + count, + content_encoding, + content_language, + content_md5, + cache_control, + x_ms_blob_content_type, + x_ms_blob_content_encoding, + x_ms_blob_content_language, + x_ms_blob_content_md5, + x_ms_blob_cache_control, + x_ms_meta_name_values, + x_ms_lease_id, + progress_callback) + + def put_block_blob_from_text(self, container_name, blob_name, text, + text_encoding='utf-8', + content_encoding=None, content_language=None, + content_md5=None, cache_control=None, + x_ms_blob_content_type=None, + x_ms_blob_content_encoding=None, + x_ms_blob_content_language=None, + x_ms_blob_content_md5=None, + x_ms_blob_cache_control=None, + x_ms_meta_name_values=None, + x_ms_lease_id=None, progress_callback=None): + ''' + Creates a new block blob from str/unicode, or updates the content of an + existing block blob, with automatic chunking and progress notifications. + + container_name: Name of existing container. + blob_name: Name of blob to create or update. + text: Text to upload to the blob. + text_encoding: Encoding to use to convert the text to bytes. + content_encoding: + Optional. Specifies which content encodings have been applied to + the blob. This value is returned to the client when the Get Blob + (REST API) operation is performed on the blob resource. The client + can use this value when returned to decode the blob content. + content_language: + Optional. Specifies the natural languages used by this resource. + content_md5: + Optional. An MD5 hash of the blob content. This hash is used to + verify the integrity of the blob during transport. When this header + is specified, the storage service checks the hash that has arrived + with the one that was sent. If the two hashes do not match, the + operation will fail with error code 400 (Bad Request). 
+ cache_control: + Optional. The Blob service stores this value but does not use or + modify it. + x_ms_blob_content_type: Optional. Set the blob's content type. + x_ms_blob_content_encoding: Optional. Set the blob's content encoding. + x_ms_blob_content_language: Optional. Set the blob's content language. + x_ms_blob_content_md5: Optional. Set the blob's MD5 hash. + x_ms_blob_cache_control: Optional. Sets the blob's cache control. + x_ms_meta_name_values: A dict containing name, value for metadata. + x_ms_lease_id: Required if the blob has an active lease. + progress_callback: + Callback for progress with signature function(current, total) where + current is the number of bytes transfered so far, and total is the + size of the blob, or None if the total size is unknown. + ''' + _validate_not_none('container_name', container_name) + _validate_not_none('blob_name', blob_name) + _validate_not_none('text', text) + + if not isinstance(text, bytes): + _validate_not_none('text_encoding', text_encoding) + text = text.encode(text_encoding) + + self.put_block_blob_from_bytes(container_name, + blob_name, + text, + 0, + len(text), + content_encoding, + content_language, + content_md5, + cache_control, + x_ms_blob_content_type, + x_ms_blob_content_encoding, + x_ms_blob_content_language, + x_ms_blob_content_md5, + x_ms_blob_cache_control, + x_ms_meta_name_values, + x_ms_lease_id, + progress_callback) + + def put_page_blob_from_path(self, container_name, blob_name, file_path, + content_encoding=None, content_language=None, + content_md5=None, cache_control=None, + x_ms_blob_content_type=None, + x_ms_blob_content_encoding=None, + x_ms_blob_content_language=None, + x_ms_blob_content_md5=None, + x_ms_blob_cache_control=None, + x_ms_meta_name_values=None, + x_ms_lease_id=None, + x_ms_blob_sequence_number=None, + progress_callback=None): + ''' + Creates a new page blob from a file path, or updates the content of an + existing page blob, with automatic chunking and progress notifications. + + container_name: Name of existing container. + blob_name: Name of blob to create or update. + file_path: Path of the file to upload as the blob content. + content_encoding: + Optional. Specifies which content encodings have been applied to + the blob. This value is returned to the client when the Get Blob + (REST API) operation is performed on the blob resource. The client + can use this value when returned to decode the blob content. + content_language: + Optional. Specifies the natural languages used by this resource. + content_md5: + Optional. An MD5 hash of the blob content. This hash is used to + verify the integrity of the blob during transport. When this header + is specified, the storage service checks the hash that has arrived + with the one that was sent. If the two hashes do not match, the + operation will fail with error code 400 (Bad Request). + cache_control: + Optional. The Blob service stores this value but does not use or + modify it. + x_ms_blob_content_type: Optional. Set the blob's content type. + x_ms_blob_content_encoding: Optional. Set the blob's content encoding. + x_ms_blob_content_language: Optional. Set the blob's content language. + x_ms_blob_content_md5: Optional. Set the blob's MD5 hash. + x_ms_blob_cache_control: Optional. Sets the blob's cache control. + x_ms_meta_name_values: A dict containing name, value for metadata. + x_ms_lease_id: Required if the blob has an active lease. + x_ms_blob_sequence_number: + Optional. Set for page blobs only. 
The sequence number is a + user-controlled value that you can use to track requests. The + value of the sequence number must be between 0 and 2^63 - 1. The + default value is 0. + progress_callback: + Callback for progress with signature function(current, total) where + current is the number of bytes transfered so far, and total is the + size of the blob, or None if the total size is unknown. + ''' + _validate_not_none('container_name', container_name) + _validate_not_none('blob_name', blob_name) + _validate_not_none('file_path', file_path) + + count = path.getsize(file_path) + with open(file_path, 'rb') as stream: + self.put_page_blob_from_file(container_name, + blob_name, + stream, + count, + content_encoding, + content_language, + content_md5, + cache_control, + x_ms_blob_content_type, + x_ms_blob_content_encoding, + x_ms_blob_content_language, + x_ms_blob_content_md5, + x_ms_blob_cache_control, + x_ms_meta_name_values, + x_ms_lease_id, + x_ms_blob_sequence_number, + progress_callback) + + def put_page_blob_from_file(self, container_name, blob_name, stream, count, + content_encoding=None, content_language=None, + content_md5=None, cache_control=None, + x_ms_blob_content_type=None, + x_ms_blob_content_encoding=None, + x_ms_blob_content_language=None, + x_ms_blob_content_md5=None, + x_ms_blob_cache_control=None, + x_ms_meta_name_values=None, + x_ms_lease_id=None, + x_ms_blob_sequence_number=None, + progress_callback=None): + ''' + Creates a new page blob from a file/stream, or updates the content of an + existing page blob, with automatic chunking and progress notifications. + + container_name: Name of existing container. + blob_name: Name of blob to create or update. + stream: Opened file/stream to upload as the blob content. + count: + Number of bytes to read from the stream. This is required, a page + blob cannot be created if the count is unknown. + content_encoding: + Optional. Specifies which content encodings have been applied to + the blob. This value is returned to the client when the Get Blob + (REST API) operation is performed on the blob resource. The client + can use this value when returned to decode the blob content. + content_language: + Optional. Specifies the natural languages used by this resource. + content_md5: + Optional. An MD5 hash of the blob content. This hash is used to + verify the integrity of the blob during transport. When this header + is specified, the storage service checks the hash that has arrived + with the one that was sent. If the two hashes do not match, the + operation will fail with error code 400 (Bad Request). + cache_control: + Optional. The Blob service stores this value but does not use or + modify it. + x_ms_blob_content_type: Optional. Set the blob's content type. + x_ms_blob_content_encoding: Optional. Set the blob's content encoding. + x_ms_blob_content_language: Optional. Set the blob's content language. + x_ms_blob_content_md5: Optional. Set the blob's MD5 hash. + x_ms_blob_cache_control: Optional. Sets the blob's cache control. + x_ms_meta_name_values: A dict containing name, value for metadata. + x_ms_lease_id: Required if the blob has an active lease. + x_ms_blob_sequence_number: + Optional. Set for page blobs only. The sequence number is a + user-controlled value that you can use to track requests. The + value of the sequence number must be between 0 and 2^63 - 1. The + default value is 0. 
+ progress_callback: + Callback for progress with signature function(current, total) where + current is the number of bytes transfered so far, and total is the + size of the blob, or None if the total size is unknown. + ''' + _validate_not_none('container_name', container_name) + _validate_not_none('blob_name', blob_name) + _validate_not_none('stream', stream) + _validate_not_none('count', count) + + if count < 0: + raise TypeError(_ERROR_VALUE_NEGATIVE.format('count')) + + if count % _PAGE_SIZE != 0: + raise TypeError(_ERROR_PAGE_BLOB_SIZE_ALIGNMENT.format(count)) + + if progress_callback: + progress_callback(0, count) + + self.put_blob(container_name, + blob_name, + b'', + 'PageBlob', + content_encoding, + content_language, + content_md5, + cache_control, + x_ms_blob_content_type, + x_ms_blob_content_encoding, + x_ms_blob_content_language, + x_ms_blob_content_md5, + x_ms_blob_cache_control, + x_ms_meta_name_values, + x_ms_lease_id, + count, + x_ms_blob_sequence_number) + + remain_bytes = count + page_start = 0 + while True: + request_count = min(remain_bytes, self._BLOB_MAX_CHUNK_DATA_SIZE) + data = stream.read(request_count) + if data: + length = len(data) + remain_bytes = remain_bytes - length + page_end = page_start + length - 1 + self.put_page(container_name, + blob_name, + data, + 'bytes={0}-{1}'.format(page_start, page_end), + 'update', + x_ms_lease_id=x_ms_lease_id) + page_start = page_start + length + + if progress_callback: + progress_callback(page_start, count) + else: + break + + def put_page_blob_from_bytes(self, container_name, blob_name, blob, + index=0, count=None, content_encoding=None, + content_language=None, content_md5=None, + cache_control=None, + x_ms_blob_content_type=None, + x_ms_blob_content_encoding=None, + x_ms_blob_content_language=None, + x_ms_blob_content_md5=None, + x_ms_blob_cache_control=None, + x_ms_meta_name_values=None, + x_ms_lease_id=None, + x_ms_blob_sequence_number=None, + progress_callback=None): + ''' + Creates a new page blob from an array of bytes, or updates the content + of an existing page blob, with automatic chunking and progress + notifications. + + container_name: Name of existing container. + blob_name: Name of blob to create or update. + blob: Content of blob as an array of bytes. + index: Start index in the array of bytes. + count: + Number of bytes to upload. Set to None or negative value to upload + all bytes starting from index. + content_encoding: + Optional. Specifies which content encodings have been applied to + the blob. This value is returned to the client when the Get Blob + (REST API) operation is performed on the blob resource. The client + can use this value when returned to decode the blob content. + content_language: + Optional. Specifies the natural languages used by this resource. + content_md5: + Optional. An MD5 hash of the blob content. This hash is used to + verify the integrity of the blob during transport. When this header + is specified, the storage service checks the hash that has arrived + with the one that was sent. If the two hashes do not match, the + operation will fail with error code 400 (Bad Request). + cache_control: + Optional. The Blob service stores this value but does not use or + modify it. + x_ms_blob_content_type: Optional. Set the blob's content type. + x_ms_blob_content_encoding: Optional. Set the blob's content encoding. + x_ms_blob_content_language: Optional. Set the blob's content language. + x_ms_blob_content_md5: Optional. Set the blob's MD5 hash. + x_ms_blob_cache_control: Optional. 
Sets the blob's cache control. + x_ms_meta_name_values: A dict containing name, value for metadata. + x_ms_lease_id: Required if the blob has an active lease. + x_ms_blob_sequence_number: + Optional. Set for page blobs only. The sequence number is a + user-controlled value that you can use to track requests. The + value of the sequence number must be between 0 and 2^63 - 1. The + default value is 0. + progress_callback: + Callback for progress with signature function(current, total) where + current is the number of bytes transfered so far, and total is the + size of the blob, or None if the total size is unknown. + ''' + _validate_not_none('container_name', container_name) + _validate_not_none('blob_name', blob_name) + _validate_not_none('blob', blob) + _validate_type_bytes('blob', blob) + + if index < 0: + raise TypeError(_ERROR_VALUE_NEGATIVE.format('index')) + + if count is None or count < 0: + count = len(blob) - index + + stream = BytesIO(blob) + stream.seek(index) + + self.put_page_blob_from_file(container_name, + blob_name, + stream, + count, + content_encoding, + content_language, + content_md5, + cache_control, + x_ms_blob_content_type, + x_ms_blob_content_encoding, + x_ms_blob_content_language, + x_ms_blob_content_md5, + x_ms_blob_cache_control, + x_ms_meta_name_values, + x_ms_lease_id, + x_ms_blob_sequence_number, + progress_callback) + + def get_blob(self, container_name, blob_name, snapshot=None, + x_ms_range=None, x_ms_lease_id=None, + x_ms_range_get_content_md5=None): + ''' + Reads or downloads a blob from the system, including its metadata and + properties. + + See get_blob_to_* for high level functions that handle the download + of large blobs with automatic chunking and progress notifications. + + container_name: Name of existing container. + blob_name: Name of existing blob. + snapshot: + Optional. The snapshot parameter is an opaque DateTime value that, + when present, specifies the blob snapshot to retrieve. + x_ms_range: + Optional. Return only the bytes of the blob in the specified range. + x_ms_lease_id: Required if the blob has an active lease. + x_ms_range_get_content_md5: + Optional. When this header is set to true and specified together + with the Range header, the service returns the MD5 hash for the + range, as long as the range is less than or equal to 4 MB in size. + ''' + _validate_not_none('container_name', container_name) + _validate_not_none('blob_name', blob_name) + request = HTTPRequest() + request.method = 'GET' + request.host = self._get_host() + request.path = '/' + _str(container_name) + '/' + _str(blob_name) + '' + request.headers = [ + ('x-ms-range', _str_or_none(x_ms_range)), + ('x-ms-lease-id', _str_or_none(x_ms_lease_id)), + ('x-ms-range-get-content-md5', + _str_or_none(x_ms_range_get_content_md5)) + ] + request.query = [('snapshot', _str_or_none(snapshot))] + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_blob_header( + request, self.account_name, self.account_key) + response = self._perform_request(request, None) + + return _create_blob_result(response) + + def get_blob_to_path(self, container_name, blob_name, file_path, + open_mode='wb', snapshot=None, x_ms_lease_id=None, + progress_callback=None): + ''' + Downloads a blob to a file path, with automatic chunking and progress + notifications. + + container_name: Name of existing container. + blob_name: Name of existing blob. + file_path: Path of file to write to. 
+        open_mode: Mode to use when opening the file.
+        snapshot:
+            Optional. The snapshot parameter is an opaque DateTime value that,
+            when present, specifies the blob snapshot to retrieve.
+        x_ms_lease_id: Required if the blob has an active lease.
+        progress_callback:
+            Callback for progress with signature function(current, total) where
+            current is the number of bytes transferred so far, and total is the
+            size of the blob.
+        '''
+        _validate_not_none('container_name', container_name)
+        _validate_not_none('blob_name', blob_name)
+        _validate_not_none('file_path', file_path)
+        _validate_not_none('open_mode', open_mode)
+
+        with open(file_path, open_mode) as stream:
+            self.get_blob_to_file(container_name,
+                                  blob_name,
+                                  stream,
+                                  snapshot,
+                                  x_ms_lease_id,
+                                  progress_callback)
+
+    def get_blob_to_file(self, container_name, blob_name, stream,
+                         snapshot=None, x_ms_lease_id=None,
+                         progress_callback=None):
+        '''
+        Downloads a blob to a file/stream, with automatic chunking and progress
+        notifications.
+
+        container_name: Name of existing container.
+        blob_name: Name of existing blob.
+        stream: Opened file/stream to write to.
+        snapshot:
+            Optional. The snapshot parameter is an opaque DateTime value that,
+            when present, specifies the blob snapshot to retrieve.
+        x_ms_lease_id: Required if the blob has an active lease.
+        progress_callback:
+            Callback for progress with signature function(current, total) where
+            current is the number of bytes transferred so far, and total is the
+            size of the blob.
+        '''
+        _validate_not_none('container_name', container_name)
+        _validate_not_none('blob_name', blob_name)
+        _validate_not_none('stream', stream)
+
+        props = self.get_blob_properties(container_name, blob_name)
+        blob_size = int(props['content-length'])
+
+        if blob_size < self._BLOB_MAX_DATA_SIZE:
+            if progress_callback:
+                progress_callback(0, blob_size)
+
+            data = self.get_blob(container_name,
+                                 blob_name,
+                                 snapshot,
+                                 x_ms_lease_id=x_ms_lease_id)
+
+            stream.write(data)
+
+            if progress_callback:
+                progress_callback(blob_size, blob_size)
+        else:
+            if progress_callback:
+                progress_callback(0, blob_size)
+
+            index = 0
+            while index < blob_size:
+                chunk_range = 'bytes={0}-{1}'.format(
+                    index,
+                    index + self._BLOB_MAX_CHUNK_DATA_SIZE - 1)
+                data = self.get_blob(
+                    container_name, blob_name, x_ms_range=chunk_range)
+                length = len(data)
+                index += length
+                if length > 0:
+                    stream.write(data)
+                    if progress_callback:
+                        progress_callback(index, blob_size)
+                    if length < self._BLOB_MAX_CHUNK_DATA_SIZE:
+                        break
+                else:
+                    break
+
+    def get_blob_to_bytes(self, container_name, blob_name, snapshot=None,
+                          x_ms_lease_id=None, progress_callback=None):
+        '''
+        Downloads a blob as an array of bytes, with automatic chunking and
+        progress notifications.
+
+        container_name: Name of existing container.
+        blob_name: Name of existing blob.
+        snapshot:
+            Optional. The snapshot parameter is an opaque DateTime value that,
+            when present, specifies the blob snapshot to retrieve.
+        x_ms_lease_id: Required if the blob has an active lease.
+        progress_callback:
+            Callback for progress with signature function(current, total) where
+            current is the number of bytes transferred so far, and total is the
+            size of the blob.
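+
+        Example (an illustrative sketch; names are assumed):
+            data = blob_service.get_blob_to_bytes('mycontainer', 'movie.mp4')
+            print('downloaded {0} bytes'.format(len(data)))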
+        '''
+        _validate_not_none('container_name', container_name)
+        _validate_not_none('blob_name', blob_name)
+
+        stream = BytesIO()
+        self.get_blob_to_file(container_name,
+                              blob_name,
+                              stream,
+                              snapshot,
+                              x_ms_lease_id,
+                              progress_callback)
+
+        return stream.getvalue()
+
+    def get_blob_to_text(self, container_name, blob_name, text_encoding='utf-8',
+                         snapshot=None, x_ms_lease_id=None,
+                         progress_callback=None):
+        '''
+        Downloads a blob as unicode text, with automatic chunking and progress
+        notifications.
+
+        container_name: Name of existing container.
+        blob_name: Name of existing blob.
+        text_encoding: Encoding to use when decoding the blob data.
+        snapshot:
+            Optional. The snapshot parameter is an opaque DateTime value that,
+            when present, specifies the blob snapshot to retrieve.
+        x_ms_lease_id: Required if the blob has an active lease.
+        progress_callback:
+            Callback for progress with signature function(current, total) where
+            current is the number of bytes transferred so far, and total is the
+            size of the blob.
+        '''
+        _validate_not_none('container_name', container_name)
+        _validate_not_none('blob_name', blob_name)
+        _validate_not_none('text_encoding', text_encoding)
+
+        result = self.get_blob_to_bytes(container_name,
+                                        blob_name,
+                                        snapshot,
+                                        x_ms_lease_id,
+                                        progress_callback)
+
+        return result.decode(text_encoding)
+
+    def get_blob_metadata(self, container_name, blob_name, snapshot=None,
+                          x_ms_lease_id=None):
+        '''
+        Returns all user-defined metadata for the specified blob or snapshot.
+
+        container_name: Name of existing container.
+        blob_name: Name of existing blob.
+        snapshot:
+            Optional. The snapshot parameter is an opaque DateTime value that,
+            when present, specifies the blob snapshot to retrieve.
+        x_ms_lease_id: Required if the blob has an active lease.
+        '''
+        _validate_not_none('container_name', container_name)
+        _validate_not_none('blob_name', blob_name)
+        request = HTTPRequest()
+        request.method = 'GET'
+        request.host = self._get_host()
+        request.path = '/' + \
+            _str(container_name) + '/' + _str(blob_name) + '?comp=metadata'
+        request.headers = [('x-ms-lease-id', _str_or_none(x_ms_lease_id))]
+        request.query = [('snapshot', _str_or_none(snapshot))]
+        request.path, request.query = _update_request_uri_query_local_storage(
+            request, self.use_local_storage)
+        request.headers = _update_storage_blob_header(
+            request, self.account_name, self.account_key)
+        response = self._perform_request(request)
+
+        return _parse_response_for_dict_prefix(response, prefixes=['x-ms-meta'])
+
+    def set_blob_metadata(self, container_name, blob_name,
+                          x_ms_meta_name_values=None, x_ms_lease_id=None):
+        '''
+        Sets user-defined metadata for the specified blob as one or more
+        name-value pairs.
+
+        container_name: Name of existing container.
+        blob_name: Name of existing blob.
+        x_ms_meta_name_values: Dict containing name and value pairs.
+        x_ms_lease_id: Required if the blob has an active lease.
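+
+        Example (an illustrative sketch; names are assumed). The pairs are
+        sent as x-ms-meta-* headers and can be read back with
+        get_blob_metadata:
+            blob_service.set_blob_metadata(
+                'mycontainer', 'myblob.txt',
+                x_ms_meta_name_values={'category': 'movies'})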
+ ''' + _validate_not_none('container_name', container_name) + _validate_not_none('blob_name', blob_name) + request = HTTPRequest() + request.method = 'PUT' + request.host = self._get_host() + request.path = '/' + \ + _str(container_name) + '/' + _str(blob_name) + '?comp=metadata' + request.headers = [ + ('x-ms-meta-name-values', x_ms_meta_name_values), + ('x-ms-lease-id', _str_or_none(x_ms_lease_id)) + ] + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_blob_header( + request, self.account_name, self.account_key) + self._perform_request(request) + + def lease_blob(self, container_name, blob_name, x_ms_lease_action, + x_ms_lease_id=None, x_ms_lease_duration=60, + x_ms_lease_break_period=None, x_ms_proposed_lease_id=None): + ''' + Establishes and manages a one-minute lock on a blob for write + operations. + + container_name: Name of existing container. + blob_name: Name of existing blob. + x_ms_lease_action: + Required. Possible values: acquire|renew|release|break|change + x_ms_lease_id: Required if the blob has an active lease. + x_ms_lease_duration: + Specifies the duration of the lease, in seconds, or negative one + (-1) for a lease that never expires. A non-infinite lease can be + between 15 and 60 seconds. A lease duration cannot be changed + using renew or change. For backwards compatibility, the default is + 60, and the value is only used on an acquire operation. + x_ms_lease_break_period: + Optional. For a break operation, this is the proposed duration of + seconds that the lease should continue before it is broken, between + 0 and 60 seconds. This break period is only used if it is shorter + than the time remaining on the lease. If longer, the time remaining + on the lease is used. A new lease will not be available before the + break period has expired, but the lease may be held for longer than + the break period. If this header does not appear with a break + operation, a fixed-duration lease breaks after the remaining lease + period elapses, and an infinite lease breaks immediately. + x_ms_proposed_lease_id: + Optional for acquire, required for change. Proposed lease ID, in a + GUID string format. + ''' + _validate_not_none('container_name', container_name) + _validate_not_none('blob_name', blob_name) + _validate_not_none('x_ms_lease_action', x_ms_lease_action) + request = HTTPRequest() + request.method = 'PUT' + request.host = self._get_host() + request.path = '/' + \ + _str(container_name) + '/' + _str(blob_name) + '?comp=lease' + request.headers = [ + ('x-ms-lease-id', _str_or_none(x_ms_lease_id)), + ('x-ms-lease-action', _str_or_none(x_ms_lease_action)), + ('x-ms-lease-duration', _str_or_none(x_ms_lease_duration\ + if x_ms_lease_action == 'acquire' else None)), + ('x-ms-lease-break-period', _str_or_none(x_ms_lease_break_period)), + ('x-ms-proposed-lease-id', _str_or_none(x_ms_proposed_lease_id)), + ] + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_blob_header( + request, self.account_name, self.account_key) + response = self._perform_request(request) + + return _parse_response_for_dict_filter( + response, + filter=['x-ms-lease-id', 'x-ms-lease-time']) + + def snapshot_blob(self, container_name, blob_name, + x_ms_meta_name_values=None, if_modified_since=None, + if_unmodified_since=None, if_match=None, + if_none_match=None, x_ms_lease_id=None): + ''' + Creates a read-only snapshot of a blob. 
+
+        container_name: Name of existing container.
+        blob_name: Name of existing blob.
+        x_ms_meta_name_values: Optional. Dict containing name and value pairs.
+        if_modified_since: Optional. A DateTime string.
+        if_unmodified_since: Optional. A DateTime string.
+        if_match:
+            Optional. An ETag value. Snapshot the blob only if its ETag value
+            matches the value specified.
+        if_none_match:
+            Optional. An ETag value. Snapshot the blob only if its ETag value
+            does not match the value specified.
+        x_ms_lease_id: Required if the blob has an active lease.
+        '''
+        _validate_not_none('container_name', container_name)
+        _validate_not_none('blob_name', blob_name)
+        request = HTTPRequest()
+        request.method = 'PUT'
+        request.host = self._get_host()
+        request.path = '/' + \
+            _str(container_name) + '/' + _str(blob_name) + '?comp=snapshot'
+        request.headers = [
+            ('x-ms-meta-name-values', x_ms_meta_name_values),
+            ('If-Modified-Since', _str_or_none(if_modified_since)),
+            ('If-Unmodified-Since', _str_or_none(if_unmodified_since)),
+            ('If-Match', _str_or_none(if_match)),
+            ('If-None-Match', _str_or_none(if_none_match)),
+            ('x-ms-lease-id', _str_or_none(x_ms_lease_id))
+        ]
+        request.path, request.query = _update_request_uri_query_local_storage(
+            request, self.use_local_storage)
+        request.headers = _update_storage_blob_header(
+            request, self.account_name, self.account_key)
+        response = self._perform_request(request)
+
+        return _parse_response_for_dict_filter(
+            response,
+            filter=['x-ms-snapshot', 'etag', 'last-modified'])
+
+    def copy_blob(self, container_name, blob_name, x_ms_copy_source,
+                  x_ms_meta_name_values=None,
+                  x_ms_source_if_modified_since=None,
+                  x_ms_source_if_unmodified_since=None,
+                  x_ms_source_if_match=None, x_ms_source_if_none_match=None,
+                  if_modified_since=None, if_unmodified_since=None,
+                  if_match=None, if_none_match=None, x_ms_lease_id=None,
+                  x_ms_source_lease_id=None):
+        '''
+        Copies a blob to a destination within the storage account.
+
+        container_name: Name of existing container.
+        blob_name: Name of existing blob.
+        x_ms_copy_source:
+            URL up to 2 KB in length that specifies a blob. A source blob in
+            the same account can be private, but a blob in another account
+            must be public or accept credentials included in this URL, such as
+            a Shared Access Signature. Examples:
+            https://myaccount.blob.core.windows.net/mycontainer/myblob
+            https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=<DateTime>
+        x_ms_meta_name_values: Optional. Dict containing name and value pairs.
+        x_ms_source_if_modified_since:
+            Optional. A DateTime value. Specify this conditional header to
+            copy the source blob only if it has been modified since the
+            specified date/time.
+        x_ms_source_if_unmodified_since:
+            Optional. A DateTime value. Specify this conditional header to
+            copy the source blob only if it has not been modified since the
+            specified date/time.
+        x_ms_source_if_match:
+            Optional. An ETag value. Specify this conditional header to copy
+            the source blob only if its ETag matches the value specified.
+        x_ms_source_if_none_match:
+            Optional. An ETag value. Specify this conditional header to copy
+            the source blob only if its ETag does not match the value
+            specified.
+        if_modified_since: Optional. A DateTime string.
+        if_unmodified_since: Optional. A DateTime string.
+        if_match:
+            Optional. An ETag value. Copy the blob only if its ETag value
+            matches the value specified.
+        if_none_match:
+            Optional. An ETag value. Copy the blob only if its ETag value
+            does not match the value specified.
+        x_ms_lease_id: Required if the blob has an active lease.
+        x_ms_source_lease_id:
+            Optional. Specify this to perform the Copy Blob operation only if
+            the lease ID given matches the active lease ID of the source blob.
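+
+        Example (an illustrative sketch; the account, container and blob
+        names are assumed; on recent service versions the returned header
+        dict includes copy status information):
+            source = \
+                'https://myaccount.blob.core.windows.net/mycontainer/myblob'
+            result = blob_service.copy_blob('mycontainer', 'myblob-copy',
+                                            source)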
+        '''
+        _validate_not_none('container_name', container_name)
+        _validate_not_none('blob_name', blob_name)
+        _validate_not_none('x_ms_copy_source', x_ms_copy_source)
+
+        if x_ms_copy_source.startswith('/'):
+            # Backwards compatibility for earlier versions of the SDK where
+            # the copy source can be in the following formats:
+            # - Blob in named container:
+            #     /accountName/containerName/blobName
+            # - Snapshot in named container:
+            #     /accountName/containerName/blobName?snapshot=<DateTime>
+            # - Blob in root container:
+            #     /accountName/blobName
+            # - Snapshot in root container:
+            #     /accountName/blobName?snapshot=<DateTime>
+            account, _, source =\
+                x_ms_copy_source.partition('/')[2].partition('/')
+            x_ms_copy_source = self.protocol + '://' + \
+                account + self.host_base + '/' + source
+
+        request = HTTPRequest()
+        request.method = 'PUT'
+        request.host = self._get_host()
+        request.path = '/' + _str(container_name) + '/' + _str(blob_name) + ''
+        request.headers = [
+            ('x-ms-copy-source', _str_or_none(x_ms_copy_source)),
+            ('x-ms-meta-name-values', x_ms_meta_name_values),
+            ('x-ms-source-if-modified-since',
+             _str_or_none(x_ms_source_if_modified_since)),
+            ('x-ms-source-if-unmodified-since',
+             _str_or_none(x_ms_source_if_unmodified_since)),
+            ('x-ms-source-if-match', _str_or_none(x_ms_source_if_match)),
+            ('x-ms-source-if-none-match',
+             _str_or_none(x_ms_source_if_none_match)),
+            ('If-Modified-Since', _str_or_none(if_modified_since)),
+            ('If-Unmodified-Since', _str_or_none(if_unmodified_since)),
+            ('If-Match', _str_or_none(if_match)),
+            ('If-None-Match', _str_or_none(if_none_match)),
+            ('x-ms-lease-id', _str_or_none(x_ms_lease_id)),
+            ('x-ms-source-lease-id', _str_or_none(x_ms_source_lease_id))
+        ]
+
+        request.path, request.query = _update_request_uri_query_local_storage(
+            request, self.use_local_storage)
+        request.headers = _update_storage_blob_header(
+            request, self.account_name, self.account_key)
+        response = self._perform_request(request)
+
+        return _parse_response_for_dict(response)
+
+    def abort_copy_blob(self, container_name, blob_name, x_ms_copy_id,
+                        x_ms_lease_id=None):
+        '''
+        Aborts a pending copy_blob operation, and leaves a destination blob
+        with zero length and full metadata.
+
+        container_name: Name of destination container.
+        blob_name: Name of destination blob.
+        x_ms_copy_id:
+            Copy identifier provided in the x-ms-copy-id of the original
+            copy_blob operation.
+        x_ms_lease_id:
+            Required if the destination blob has an active infinite lease.
+        '''
+        _validate_not_none('container_name', container_name)
+        _validate_not_none('blob_name', blob_name)
+        _validate_not_none('x_ms_copy_id', x_ms_copy_id)
+        request = HTTPRequest()
+        request.method = 'PUT'
+        request.host = self._get_host()
+        request.path = '/' + _str(container_name) + '/' + \
+            _str(blob_name) + '?comp=copy&copyid=' + \
+            _str(x_ms_copy_id)
+        request.headers = [
+            ('x-ms-lease-id', _str_or_none(x_ms_lease_id)),
+            ('x-ms-copy-action', 'abort'),
+        ]
+        request.path, request.query = _update_request_uri_query_local_storage(
+            request, self.use_local_storage)
+        request.headers = _update_storage_blob_header(
+            request, self.account_name, self.account_key)
+        self._perform_request(request)
+
+    def delete_blob(self, container_name, blob_name, snapshot=None,
+                    timeout=None, x_ms_lease_id=None,
+                    x_ms_delete_snapshots=None):
+        '''
+        Marks the specified blob or snapshot for deletion. The blob is later
+        deleted during garbage collection.
+ + To mark a specific snapshot for deletion provide the date/time of the + snapshot via the snapshot parameter. + + container_name: Name of existing container. + blob_name: Name of existing blob. + snapshot: + Optional. The snapshot parameter is an opaque DateTime value that, + when present, specifies the blob snapshot to delete. + timeout: + Optional. The timeout parameter is expressed in seconds. + The Blob service returns an error when the timeout interval elapses + while processing the request. + x_ms_lease_id: Required if the blob has an active lease. + x_ms_delete_snapshots: + Required if the blob has associated snapshots. Specify one of the + following two options: + include: Delete the base blob and all of its snapshots. + only: Delete only the blob's snapshots and not the blob itself. + This header should be specified only for a request against the base + blob resource. If this header is specified on a request to delete + an individual snapshot, the Blob service returns status code 400 + (Bad Request). If this header is not specified on the request and + the blob has associated snapshots, the Blob service returns status + code 409 (Conflict). + ''' + _validate_not_none('container_name', container_name) + _validate_not_none('blob_name', blob_name) + request = HTTPRequest() + request.method = 'DELETE' + request.host = self._get_host() + request.path = '/' + _str(container_name) + '/' + _str(blob_name) + '' + request.headers = [ + ('x-ms-lease-id', _str_or_none(x_ms_lease_id)), + ('x-ms-delete-snapshots', _str_or_none(x_ms_delete_snapshots)) + ] + request.query = [ + ('snapshot', _str_or_none(snapshot)), + ('timeout', _int_or_none(timeout)) + ] + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_blob_header( + request, self.account_name, self.account_key) + self._perform_request(request) + + def put_block(self, container_name, blob_name, block, blockid, + content_md5=None, x_ms_lease_id=None): + ''' + Creates a new block to be committed as part of a blob. + + container_name: Name of existing container. + blob_name: Name of existing blob. + block: Content of the block. + blockid: + Required. A value that identifies the block. The string must be + less than or equal to 64 bytes in size. + content_md5: + Optional. An MD5 hash of the block content. This hash is used to + verify the integrity of the blob during transport. When this + header is specified, the storage service checks the hash that has + arrived with the one that was sent. + x_ms_lease_id: Required if the blob has an active lease. 
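+
+        Example (an illustrative sketch; names are assumed; all block ids
+        within a blob must be the same length, and the id is base64-encoded
+        internally):
+            blob_service.put_block('mycontainer', 'myblob',
+                                   b'first chunk', '{0:08d}'.format(0))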
+ ''' + _validate_not_none('container_name', container_name) + _validate_not_none('blob_name', blob_name) + _validate_not_none('block', block) + _validate_not_none('blockid', blockid) + request = HTTPRequest() + request.method = 'PUT' + request.host = self._get_host() + request.path = '/' + \ + _str(container_name) + '/' + _str(blob_name) + '?comp=block' + request.headers = [ + ('Content-MD5', _str_or_none(content_md5)), + ('x-ms-lease-id', _str_or_none(x_ms_lease_id)) + ] + request.query = [('blockid', _encode_base64(_str_or_none(blockid)))] + request.body = _get_request_body_bytes_only('block', block) + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_blob_header( + request, self.account_name, self.account_key) + self._perform_request(request) + + def put_block_list(self, container_name, blob_name, block_list, + content_md5=None, x_ms_blob_cache_control=None, + x_ms_blob_content_type=None, + x_ms_blob_content_encoding=None, + x_ms_blob_content_language=None, + x_ms_blob_content_md5=None, x_ms_meta_name_values=None, + x_ms_lease_id=None): + ''' + Writes a blob by specifying the list of block IDs that make up the + blob. In order to be written as part of a blob, a block must have been + successfully written to the server in a prior Put Block (REST API) + operation. + + container_name: Name of existing container. + blob_name: Name of existing blob. + block_list: A str list containing the block ids. + content_md5: + Optional. An MD5 hash of the block content. This hash is used to + verify the integrity of the blob during transport. When this header + is specified, the storage service checks the hash that has arrived + with the one that was sent. + x_ms_blob_cache_control: + Optional. Sets the blob's cache control. If specified, this + property is stored with the blob and returned with a read request. + x_ms_blob_content_type: + Optional. Sets the blob's content type. If specified, this property + is stored with the blob and returned with a read request. + x_ms_blob_content_encoding: + Optional. Sets the blob's content encoding. If specified, this + property is stored with the blob and returned with a read request. + x_ms_blob_content_language: + Optional. Set the blob's content language. If specified, this + property is stored with the blob and returned with a read request. + x_ms_blob_content_md5: + Optional. An MD5 hash of the blob content. Note that this hash is + not validated, as the hashes for the individual blocks were + validated when each was uploaded. + x_ms_meta_name_values: Optional. Dict containing name and value pairs. + x_ms_lease_id: Required if the blob has an active lease. 
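+
+        Example (an illustrative sketch; continues the put_block example
+        above by committing two previously uploaded blocks):
+            blob_service.put_block_list('mycontainer', 'myblob',
+                                        ['00000000', '00000001'])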
+ ''' + _validate_not_none('container_name', container_name) + _validate_not_none('blob_name', blob_name) + _validate_not_none('block_list', block_list) + request = HTTPRequest() + request.method = 'PUT' + request.host = self._get_host() + request.path = '/' + \ + _str(container_name) + '/' + _str(blob_name) + '?comp=blocklist' + request.headers = [ + ('Content-MD5', _str_or_none(content_md5)), + ('x-ms-blob-cache-control', _str_or_none(x_ms_blob_cache_control)), + ('x-ms-blob-content-type', _str_or_none(x_ms_blob_content_type)), + ('x-ms-blob-content-encoding', + _str_or_none(x_ms_blob_content_encoding)), + ('x-ms-blob-content-language', + _str_or_none(x_ms_blob_content_language)), + ('x-ms-blob-content-md5', _str_or_none(x_ms_blob_content_md5)), + ('x-ms-meta-name-values', x_ms_meta_name_values), + ('x-ms-lease-id', _str_or_none(x_ms_lease_id)) + ] + request.body = _get_request_body( + _convert_block_list_to_xml(block_list)) + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_blob_header( + request, self.account_name, self.account_key) + self._perform_request(request) + + def get_block_list(self, container_name, blob_name, snapshot=None, + blocklisttype=None, x_ms_lease_id=None): + ''' + Retrieves the list of blocks that have been uploaded as part of a + block blob. + + container_name: Name of existing container. + blob_name: Name of existing blob. + snapshot: + Optional. Datetime to determine the time to retrieve the blocks. + blocklisttype: + Specifies whether to return the list of committed blocks, the list + of uncommitted blocks, or both lists together. Valid values are: + committed, uncommitted, or all. + x_ms_lease_id: Required if the blob has an active lease. + ''' + _validate_not_none('container_name', container_name) + _validate_not_none('blob_name', blob_name) + request = HTTPRequest() + request.method = 'GET' + request.host = self._get_host() + request.path = '/' + \ + _str(container_name) + '/' + _str(blob_name) + '?comp=blocklist' + request.headers = [('x-ms-lease-id', _str_or_none(x_ms_lease_id))] + request.query = [ + ('snapshot', _str_or_none(snapshot)), + ('blocklisttype', _str_or_none(blocklisttype)) + ] + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_blob_header( + request, self.account_name, self.account_key) + response = self._perform_request(request) + + return _convert_response_to_block_list(response) + + def put_page(self, container_name, blob_name, page, x_ms_range, + x_ms_page_write, timeout=None, content_md5=None, + x_ms_lease_id=None, x_ms_if_sequence_number_lte=None, + x_ms_if_sequence_number_lt=None, + x_ms_if_sequence_number_eq=None, + if_modified_since=None, if_unmodified_since=None, + if_match=None, if_none_match=None): + ''' + Writes a range of pages to a page blob. + + container_name: Name of existing container. + blob_name: Name of existing blob. + page: Content of the page. + x_ms_range: + Required. Specifies the range of bytes to be written as a page. + Both the start and end of the range must be specified. Must be in + format: bytes=startByte-endByte. Given that pages must be aligned + with 512-byte boundaries, the start offset must be a modulus of + 512 and the end offset must be a modulus of 512-1. Examples of + valid byte ranges are 0-511, 512-1023, etc. + x_ms_page_write: + Required. 
+            Required. You may specify one of the following options:
+                update (lower case):
+                    Writes the bytes specified by the request body into the
+                    specified range. The Range and Content-Length headers must
+                    match to perform the update.
+                clear (lower case):
+                    Clears the specified range and releases the space used in
+                    storage for that range. To clear a range, set the
+                    Content-Length header to zero, and the Range header to a
+                    value that indicates the range to clear, up to the maximum
+                    blob size.
+        timeout: Optional. The timeout parameter is expressed in seconds.
+        content_md5:
+            Optional. An MD5 hash of the page content. This hash is used to
+            verify the integrity of the page during transport. When this header
+            is specified, the storage service compares the hash of the content
+            that has arrived with the header value that was sent. If the two
+            hashes do not match, the operation will fail with error code 400
+            (Bad Request).
+        x_ms_lease_id: Required if the blob has an active lease.
+        x_ms_if_sequence_number_lte:
+            Optional. If the blob's sequence number is less than or equal to
+            the specified value, the request proceeds; otherwise it fails.
+        x_ms_if_sequence_number_lt:
+            Optional. If the blob's sequence number is less than the specified
+            value, the request proceeds; otherwise it fails.
+        x_ms_if_sequence_number_eq:
+            Optional. If the blob's sequence number is equal to the specified
+            value, the request proceeds; otherwise it fails.
+        if_modified_since:
+            Optional. A DateTime value. Specify this conditional header to
+            write the page only if the blob has been modified since the
+            specified date/time. If the blob has not been modified, the Blob
+            service fails.
+        if_unmodified_since:
+            Optional. A DateTime value. Specify this conditional header to
+            write the page only if the blob has not been modified since the
+            specified date/time. If the blob has been modified, the Blob
+            service fails.
+        if_match:
+            Optional. An ETag value. Specify an ETag value for this conditional
+            header to write the page only if the blob's ETag value matches the
+            value specified. If the values do not match, the Blob service fails.
+        if_none_match:
+            Optional. An ETag value. Specify an ETag value for this conditional
+            header to write the page only if the blob's ETag value does not
+            match the value specified. If the values are identical, the Blob
+            service fails.
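+
+        Example -- an illustrative sketch that writes one 512-byte page;
+        the credentials, 'mycontainer' and 'mypageblob' are placeholders,
+        and 'mypageblob' is assumed to have been created as a page blob
+        beforehand:
+            bs = BlobService(account_name='myaccount', account_key='mykey')
+            # Page writes must cover whole 512-byte aligned ranges.
+            bs.put_page('mycontainer', 'mypageblob', b'-' * 512,
+                        'bytes=0-511', 'update')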
+        '''
+        _validate_not_none('container_name', container_name)
+        _validate_not_none('blob_name', blob_name)
+        _validate_not_none('page', page)
+        _validate_not_none('x_ms_range', x_ms_range)
+        _validate_not_none('x_ms_page_write', x_ms_page_write)
+        request = HTTPRequest()
+        request.method = 'PUT'
+        request.host = self._get_host()
+        request.path = '/' + \
+            _str(container_name) + '/' + _str(blob_name) + '?comp=page'
+        request.headers = [
+            ('x-ms-range', _str_or_none(x_ms_range)),
+            ('Content-MD5', _str_or_none(content_md5)),
+            ('x-ms-page-write', _str_or_none(x_ms_page_write)),
+            ('x-ms-lease-id', _str_or_none(x_ms_lease_id)),
+            ('x-ms-if-sequence-number-le',
+             _str_or_none(x_ms_if_sequence_number_lte)),
+            ('x-ms-if-sequence-number-lt',
+             _str_or_none(x_ms_if_sequence_number_lt)),
+            ('x-ms-if-sequence-number-eq',
+             _str_or_none(x_ms_if_sequence_number_eq)),
+            ('If-Modified-Since', _str_or_none(if_modified_since)),
+            ('If-Unmodified-Since', _str_or_none(if_unmodified_since)),
+            ('If-Match', _str_or_none(if_match)),
+            ('If-None-Match', _str_or_none(if_none_match))
+        ]
+        request.query = [('timeout', _int_or_none(timeout))]
+        request.body = _get_request_body_bytes_only('page', page)
+        request.path, request.query = _update_request_uri_query_local_storage(
+            request, self.use_local_storage)
+        request.headers = _update_storage_blob_header(
+            request, self.account_name, self.account_key)
+        self._perform_request(request)
+
+    def get_page_ranges(self, container_name, blob_name, snapshot=None,
+                        range=None, x_ms_range=None, x_ms_lease_id=None):
+        '''
+        Retrieves the page ranges for a blob.
+
+        container_name: Name of existing container.
+        blob_name: Name of existing blob.
+        snapshot:
+            Optional. The snapshot parameter is an opaque DateTime value that,
+            when present, specifies the blob snapshot to retrieve information
+            from.
+        range:
+            Optional. Specifies the range of bytes over which to list ranges,
+            inclusively. If omitted, then all ranges for the blob are returned.
+        x_ms_range:
+            Optional. Alternative to the Range header; specifies the range of
+            bytes over which to list ranges, inclusively. Both the start and
+            end of the range must be specified. Must be in format:
+            bytes=startByte-endByte. Given that pages must be aligned with
+            512-byte boundaries, the start offset must be a multiple of 512
+            and the end offset must be one less than a multiple of 512.
+            Examples of valid byte ranges are 0-511, 512-1023, etc.
+        x_ms_lease_id: Required if the blob has an active lease.
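+
+        Example -- an illustrative sketch; the credentials, 'mycontainer'
+        and 'mypageblob' are placeholder names for an existing page blob:
+            bs = BlobService(account_name='myaccount', account_key='mykey')
+            ranges = bs.get_page_ranges('mycontainer', 'mypageblob')
+            for page_range in ranges.page_ranges:
+                print('%d-%d' % (page_range.start, page_range.end))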
+ ''' + _validate_not_none('container_name', container_name) + _validate_not_none('blob_name', blob_name) + request = HTTPRequest() + request.method = 'GET' + request.host = self._get_host() + request.path = '/' + \ + _str(container_name) + '/' + _str(blob_name) + '?comp=pagelist' + request.headers = [ + ('Range', _str_or_none(range)), + ('x-ms-range', _str_or_none(x_ms_range)), + ('x-ms-lease-id', _str_or_none(x_ms_lease_id)) + ] + request.query = [('snapshot', _str_or_none(snapshot))] + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_blob_header( + request, self.account_name, self.account_key) + response = self._perform_request(request) + + return _parse_simple_list(response, PageList, PageRange, "page_ranges") diff --git a/awx/lib/site-packages/azure/storage/cloudstorageaccount.py b/awx/lib/site-packages/azure/storage/cloudstorageaccount.py index e043f898cd..4cca4c45b3 100644 --- a/awx/lib/site-packages/azure/storage/cloudstorageaccount.py +++ b/awx/lib/site-packages/azure/storage/cloudstorageaccount.py @@ -1,39 +1,39 @@ -#------------------------------------------------------------------------- -# Copyright (c) Microsoft. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#-------------------------------------------------------------------------- -from azure.storage.blobservice import BlobService -from azure.storage.tableservice import TableService -from azure.storage.queueservice import QueueService - - -class CloudStorageAccount(object): - - """ - Provides a factory for creating the blob, queue, and table services - with a common account name and account key. Users can either use the - factory or can construct the appropriate service directly. - """ - - def __init__(self, account_name=None, account_key=None): - self.account_name = account_name - self.account_key = account_key - - def create_blob_service(self): - return BlobService(self.account_name, self.account_key) - - def create_table_service(self): - return TableService(self.account_name, self.account_key) - - def create_queue_service(self): - return QueueService(self.account_name, self.account_key) +#------------------------------------------------------------------------- +# Copyright (c) Microsoft. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#-------------------------------------------------------------------------- +from azure.storage.blobservice import BlobService +from azure.storage.tableservice import TableService +from azure.storage.queueservice import QueueService + + +class CloudStorageAccount(object): + + """ + Provides a factory for creating the blob, queue, and table services + with a common account name and account key. Users can either use the + factory or can construct the appropriate service directly. + """ + + def __init__(self, account_name=None, account_key=None): + self.account_name = account_name + self.account_key = account_key + + def create_blob_service(self): + return BlobService(self.account_name, self.account_key) + + def create_table_service(self): + return TableService(self.account_name, self.account_key) + + def create_queue_service(self): + return QueueService(self.account_name, self.account_key) diff --git a/awx/lib/site-packages/azure/storage/queueservice.py b/awx/lib/site-packages/azure/storage/queueservice.py index fdde5fafd9..bfcb47a2de 100644 --- a/awx/lib/site-packages/azure/storage/queueservice.py +++ b/awx/lib/site-packages/azure/storage/queueservice.py @@ -1,458 +1,458 @@ -#------------------------------------------------------------------------- -# Copyright (c) Microsoft. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#-------------------------------------------------------------------------- -from azure import ( - WindowsAzureConflictError, - WindowsAzureError, - DEV_QUEUE_HOST, - QUEUE_SERVICE_HOST_BASE, - xml_escape, - _convert_class_to_xml, - _dont_fail_not_exist, - _dont_fail_on_exist, - _get_request_body, - _int_or_none, - _parse_enum_results_list, - _parse_response, - _parse_response_for_dict_filter, - _parse_response_for_dict_prefix, - _str, - _str_or_none, - _update_request_uri_query_local_storage, - _validate_not_none, - _ERROR_CONFLICT, - ) -from azure.http import ( - HTTPRequest, - HTTP_RESPONSE_NO_CONTENT, - ) -from azure.storage import ( - Queue, - QueueEnumResults, - QueueMessagesList, - StorageServiceProperties, - _update_storage_queue_header, - ) -from azure.storage.storageclient import _StorageClient - - -class QueueService(_StorageClient): - - ''' - This is the main class managing queue resources. - ''' - - def __init__(self, account_name=None, account_key=None, protocol='https', - host_base=QUEUE_SERVICE_HOST_BASE, dev_host=DEV_QUEUE_HOST): - ''' - account_name: your storage account name, required for all operations. - account_key: your storage account key, required for all operations. - protocol: Optional. Protocol. Defaults to http. - host_base: - Optional. Live host base url. Defaults to Azure url. Override this - for on-premise. - dev_host: Optional. Dev host url. Defaults to localhost. 
- ''' - super(QueueService, self).__init__( - account_name, account_key, protocol, host_base, dev_host) - - def get_queue_service_properties(self, timeout=None): - ''' - Gets the properties of a storage account's Queue Service, including - Windows Azure Storage Analytics. - - timeout: Optional. The timeout parameter is expressed in seconds. - ''' - request = HTTPRequest() - request.method = 'GET' - request.host = self._get_host() - request.path = '/?restype=service&comp=properties' - request.query = [('timeout', _int_or_none(timeout))] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_queue_header( - request, self.account_name, self.account_key) - response = self._perform_request(request) - - return _parse_response(response, StorageServiceProperties) - - def list_queues(self, prefix=None, marker=None, maxresults=None, - include=None): - ''' - Lists all of the queues in a given storage account. - - prefix: - Filters the results to return only queues with names that begin - with the specified prefix. - marker: - A string value that identifies the portion of the list to be - returned with the next list operation. The operation returns a - NextMarker element within the response body if the list returned - was not complete. This value may then be used as a query parameter - in a subsequent call to request the next portion of the list of - queues. The marker value is opaque to the client. - maxresults: - Specifies the maximum number of queues to return. If maxresults is - not specified, the server will return up to 5,000 items. - include: - Optional. Include this parameter to specify that the container's - metadata be returned as part of the response body. - ''' - request = HTTPRequest() - request.method = 'GET' - request.host = self._get_host() - request.path = '/?comp=list' - request.query = [ - ('prefix', _str_or_none(prefix)), - ('marker', _str_or_none(marker)), - ('maxresults', _int_or_none(maxresults)), - ('include', _str_or_none(include)) - ] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_queue_header( - request, self.account_name, self.account_key) - response = self._perform_request(request) - - return _parse_enum_results_list( - response, QueueEnumResults, "Queues", Queue) - - def create_queue(self, queue_name, x_ms_meta_name_values=None, - fail_on_exist=False): - ''' - Creates a queue under the given account. - - queue_name: name of the queue. - x_ms_meta_name_values: - Optional. A dict containing name-value pairs to associate with the - queue as metadata. - fail_on_exist: Specify whether throw exception when queue exists. 
- ''' - _validate_not_none('queue_name', queue_name) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + _str(queue_name) + '' - request.headers = [('x-ms-meta-name-values', x_ms_meta_name_values)] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_queue_header( - request, self.account_name, self.account_key) - if not fail_on_exist: - try: - response = self._perform_request(request) - if response.status == HTTP_RESPONSE_NO_CONTENT: - return False - return True - except WindowsAzureError as ex: - _dont_fail_on_exist(ex) - return False - else: - response = self._perform_request(request) - if response.status == HTTP_RESPONSE_NO_CONTENT: - raise WindowsAzureConflictError( - _ERROR_CONFLICT.format(response.message)) - return True - - def delete_queue(self, queue_name, fail_not_exist=False): - ''' - Permanently deletes the specified queue. - - queue_name: Name of the queue. - fail_not_exist: - Specify whether throw exception when queue doesn't exist. - ''' - _validate_not_none('queue_name', queue_name) - request = HTTPRequest() - request.method = 'DELETE' - request.host = self._get_host() - request.path = '/' + _str(queue_name) + '' - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_queue_header( - request, self.account_name, self.account_key) - if not fail_not_exist: - try: - self._perform_request(request) - return True - except WindowsAzureError as ex: - _dont_fail_not_exist(ex) - return False - else: - self._perform_request(request) - return True - - def get_queue_metadata(self, queue_name): - ''' - Retrieves user-defined metadata and queue properties on the specified - queue. Metadata is associated with the queue as name-values pairs. - - queue_name: Name of the queue. - ''' - _validate_not_none('queue_name', queue_name) - request = HTTPRequest() - request.method = 'GET' - request.host = self._get_host() - request.path = '/' + _str(queue_name) + '?comp=metadata' - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_queue_header( - request, self.account_name, self.account_key) - response = self._perform_request(request) - - return _parse_response_for_dict_prefix( - response, - prefixes=['x-ms-meta', 'x-ms-approximate-messages-count']) - - def set_queue_metadata(self, queue_name, x_ms_meta_name_values=None): - ''' - Sets user-defined metadata on the specified queue. Metadata is - associated with the queue as name-value pairs. - - queue_name: Name of the queue. - x_ms_meta_name_values: - Optional. A dict containing name-value pairs to associate with the - queue as metadata. - ''' - _validate_not_none('queue_name', queue_name) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + _str(queue_name) + '?comp=metadata' - request.headers = [('x-ms-meta-name-values', x_ms_meta_name_values)] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_queue_header( - request, self.account_name, self.account_key) - self._perform_request(request) - - def put_message(self, queue_name, message_text, visibilitytimeout=None, - messagettl=None): - ''' - Adds a new message to the back of the message queue. 
A visibility - timeout can also be specified to make the message invisible until the - visibility timeout expires. A message must be in a format that can be - included in an XML request with UTF-8 encoding. The encoded message can - be up to 64KB in size for versions 2011-08-18 and newer, or 8KB in size - for previous versions. - - queue_name: Name of the queue. - message_text: Message content. - visibilitytimeout: - Optional. If not specified, the default value is 0. Specifies the - new visibility timeout value, in seconds, relative to server time. - The new value must be larger than or equal to 0, and cannot be - larger than 7 days. The visibility timeout of a message cannot be - set to a value later than the expiry time. visibilitytimeout - should be set to a value smaller than the time-to-live value. - messagettl: - Optional. Specifies the time-to-live interval for the message, in - seconds. The maximum time-to-live allowed is 7 days. If this - parameter is omitted, the default time-to-live is 7 days. - ''' - _validate_not_none('queue_name', queue_name) - _validate_not_none('message_text', message_text) - request = HTTPRequest() - request.method = 'POST' - request.host = self._get_host() - request.path = '/' + _str(queue_name) + '/messages' - request.query = [ - ('visibilitytimeout', _str_or_none(visibilitytimeout)), - ('messagettl', _str_or_none(messagettl)) - ] - request.body = _get_request_body( - '<?xml version="1.0" encoding="utf-8"?> \ -<QueueMessage> \ - <MessageText>' + xml_escape(_str(message_text)) + '</MessageText> \ -</QueueMessage>') - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_queue_header( - request, self.account_name, self.account_key) - self._perform_request(request) - - def get_messages(self, queue_name, numofmessages=None, - visibilitytimeout=None): - ''' - Retrieves one or more messages from the front of the queue. - - queue_name: Name of the queue. - numofmessages: - Optional. A nonzero integer value that specifies the number of - messages to retrieve from the queue, up to a maximum of 32. If - fewer are visible, the visible messages are returned. By default, - a single message is retrieved from the queue with this operation. - visibilitytimeout: - Specifies the new visibility timeout value, in seconds, relative - to server time. The new value must be larger than or equal to 1 - second, and cannot be larger than 7 days, or larger than 2 hours - on REST protocol versions prior to version 2011-08-18. The - visibility timeout of a message can be set to a value later than - the expiry time. - ''' - _validate_not_none('queue_name', queue_name) - request = HTTPRequest() - request.method = 'GET' - request.host = self._get_host() - request.path = '/' + _str(queue_name) + '/messages' - request.query = [ - ('numofmessages', _str_or_none(numofmessages)), - ('visibilitytimeout', _str_or_none(visibilitytimeout)) - ] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_queue_header( - request, self.account_name, self.account_key) - response = self._perform_request(request) - - return _parse_response(response, QueueMessagesList) - - def peek_messages(self, queue_name, numofmessages=None): - ''' - Retrieves one or more messages from the front of the queue, but does - not alter the visibility of the message. - - queue_name: Name of the queue. - numofmessages: - Optional. 
A nonzero integer value that specifies the number of - messages to peek from the queue, up to a maximum of 32. By default, - a single message is peeked from the queue with this operation. - ''' - _validate_not_none('queue_name', queue_name) - request = HTTPRequest() - request.method = 'GET' - request.host = self._get_host() - request.path = '/' + _str(queue_name) + '/messages?peekonly=true' - request.query = [('numofmessages', _str_or_none(numofmessages))] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_queue_header( - request, self.account_name, self.account_key) - response = self._perform_request(request) - - return _parse_response(response, QueueMessagesList) - - def delete_message(self, queue_name, message_id, popreceipt): - ''' - Deletes the specified message. - - queue_name: Name of the queue. - message_id: Message to delete. - popreceipt: - Required. A valid pop receipt value returned from an earlier call - to the Get Messages or Update Message operation. - ''' - _validate_not_none('queue_name', queue_name) - _validate_not_none('message_id', message_id) - _validate_not_none('popreceipt', popreceipt) - request = HTTPRequest() - request.method = 'DELETE' - request.host = self._get_host() - request.path = '/' + \ - _str(queue_name) + '/messages/' + _str(message_id) + '' - request.query = [('popreceipt', _str_or_none(popreceipt))] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_queue_header( - request, self.account_name, self.account_key) - self._perform_request(request) - - def clear_messages(self, queue_name): - ''' - Deletes all messages from the specified queue. - - queue_name: Name of the queue. - ''' - _validate_not_none('queue_name', queue_name) - request = HTTPRequest() - request.method = 'DELETE' - request.host = self._get_host() - request.path = '/' + _str(queue_name) + '/messages' - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_queue_header( - request, self.account_name, self.account_key) - self._perform_request(request) - - def update_message(self, queue_name, message_id, message_text, popreceipt, - visibilitytimeout): - ''' - Updates the visibility timeout of a message. You can also use this - operation to update the contents of a message. - - queue_name: Name of the queue. - message_id: Message to update. - message_text: Content of message. - popreceipt: - Required. A valid pop receipt value returned from an earlier call - to the Get Messages or Update Message operation. - visibilitytimeout: - Required. Specifies the new visibility timeout value, in seconds, - relative to server time. The new value must be larger than or equal - to 0, and cannot be larger than 7 days. The visibility timeout of a - message cannot be set to a value later than the expiry time. A - message can be updated until it has been deleted or has expired. 
- ''' - _validate_not_none('queue_name', queue_name) - _validate_not_none('message_id', message_id) - _validate_not_none('message_text', message_text) - _validate_not_none('popreceipt', popreceipt) - _validate_not_none('visibilitytimeout', visibilitytimeout) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + \ - _str(queue_name) + '/messages/' + _str(message_id) + '' - request.query = [ - ('popreceipt', _str_or_none(popreceipt)), - ('visibilitytimeout', _str_or_none(visibilitytimeout)) - ] - request.body = _get_request_body( - '<?xml version="1.0" encoding="utf-8"?> \ -<QueueMessage> \ - <MessageText>' + xml_escape(_str(message_text)) + '</MessageText> \ -</QueueMessage>') - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_queue_header( - request, self.account_name, self.account_key) - response = self._perform_request(request) - - return _parse_response_for_dict_filter( - response, - filter=['x-ms-popreceipt', 'x-ms-time-next-visible']) - - def set_queue_service_properties(self, storage_service_properties, - timeout=None): - ''' - Sets the properties of a storage account's Queue service, including - Windows Azure Storage Analytics. - - storage_service_properties: StorageServiceProperties object. - timeout: Optional. The timeout parameter is expressed in seconds. - ''' - _validate_not_none('storage_service_properties', - storage_service_properties) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/?restype=service&comp=properties' - request.query = [('timeout', _int_or_none(timeout))] - request.body = _get_request_body( - _convert_class_to_xml(storage_service_properties)) - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_queue_header( - request, self.account_name, self.account_key) - self._perform_request(request) +#------------------------------------------------------------------------- +# Copyright (c) Microsoft. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#--------------------------------------------------------------------------
+from azure import (
+    WindowsAzureConflictError,
+    WindowsAzureError,
+    DEV_QUEUE_HOST,
+    QUEUE_SERVICE_HOST_BASE,
+    xml_escape,
+    _convert_class_to_xml,
+    _dont_fail_not_exist,
+    _dont_fail_on_exist,
+    _get_request_body,
+    _int_or_none,
+    _parse_enum_results_list,
+    _parse_response,
+    _parse_response_for_dict_filter,
+    _parse_response_for_dict_prefix,
+    _str,
+    _str_or_none,
+    _update_request_uri_query_local_storage,
+    _validate_not_none,
+    _ERROR_CONFLICT,
+    )
+from azure.http import (
+    HTTPRequest,
+    HTTP_RESPONSE_NO_CONTENT,
+    )
+from azure.storage import (
+    Queue,
+    QueueEnumResults,
+    QueueMessagesList,
+    StorageServiceProperties,
+    _update_storage_queue_header,
+    )
+from azure.storage.storageclient import _StorageClient
+
+
+class QueueService(_StorageClient):
+
+    '''
+    This is the main class managing queue resources.
+    '''
+
+    def __init__(self, account_name=None, account_key=None, protocol='https',
+                 host_base=QUEUE_SERVICE_HOST_BASE, dev_host=DEV_QUEUE_HOST):
+        '''
+        account_name: your storage account name, required for all operations.
+        account_key: your storage account key, required for all operations.
+        protocol: Optional. Protocol. Defaults to https.
+        host_base:
+            Optional. Live host base url. Defaults to Azure url. Override this
+            for on-premise.
+        dev_host: Optional. Dev host url. Defaults to localhost.
+        '''
+        super(QueueService, self).__init__(
+            account_name, account_key, protocol, host_base, dev_host)
+
+    def get_queue_service_properties(self, timeout=None):
+        '''
+        Gets the properties of a storage account's Queue Service, including
+        Windows Azure Storage Analytics.
+
+        timeout: Optional. The timeout parameter is expressed in seconds.
+        '''
+        request = HTTPRequest()
+        request.method = 'GET'
+        request.host = self._get_host()
+        request.path = '/?restype=service&comp=properties'
+        request.query = [('timeout', _int_or_none(timeout))]
+        request.path, request.query = _update_request_uri_query_local_storage(
+            request, self.use_local_storage)
+        request.headers = _update_storage_queue_header(
+            request, self.account_name, self.account_key)
+        response = self._perform_request(request)
+
+        return _parse_response(response, StorageServiceProperties)
+
+    def list_queues(self, prefix=None, marker=None, maxresults=None,
+                    include=None):
+        '''
+        Lists all of the queues in a given storage account.
+
+        prefix:
+            Filters the results to return only queues with names that begin
+            with the specified prefix.
+        marker:
+            A string value that identifies the portion of the list to be
+            returned with the next list operation. The operation returns a
+            NextMarker element within the response body if the list returned
+            was not complete. This value may then be used as a query parameter
+            in a subsequent call to request the next portion of the list of
+            queues. The marker value is opaque to the client.
+        maxresults:
+            Specifies the maximum number of queues to return. If maxresults is
+            not specified, the server will return up to 5,000 items.
+        include:
+            Optional. Include this parameter to specify that the queue's
+            metadata be returned as part of the response body.
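+
+        Example -- an illustrative sketch; the credentials and the 'task'
+        prefix are placeholders, and iterating the returned QueueEnumResults
+        is assumed to yield Queue objects:
+            qs = QueueService(account_name='myaccount', account_key='mykey')
+            queues = qs.list_queues(prefix='task')
+            for queue in queues:
+                print(queue.name)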
+        '''
+        request = HTTPRequest()
+        request.method = 'GET'
+        request.host = self._get_host()
+        request.path = '/?comp=list'
+        request.query = [
+            ('prefix', _str_or_none(prefix)),
+            ('marker', _str_or_none(marker)),
+            ('maxresults', _int_or_none(maxresults)),
+            ('include', _str_or_none(include))
+        ]
+        request.path, request.query = _update_request_uri_query_local_storage(
+            request, self.use_local_storage)
+        request.headers = _update_storage_queue_header(
+            request, self.account_name, self.account_key)
+        response = self._perform_request(request)
+
+        return _parse_enum_results_list(
+            response, QueueEnumResults, "Queues", Queue)
+
+    def create_queue(self, queue_name, x_ms_meta_name_values=None,
+                     fail_on_exist=False):
+        '''
+        Creates a queue under the given account.
+
+        queue_name: Name of the queue.
+        x_ms_meta_name_values:
+            Optional. A dict containing name-value pairs to associate with the
+            queue as metadata.
+        fail_on_exist:
+            Specify whether to throw an exception when the queue exists.
+        '''
+        _validate_not_none('queue_name', queue_name)
+        request = HTTPRequest()
+        request.method = 'PUT'
+        request.host = self._get_host()
+        request.path = '/' + _str(queue_name) + ''
+        request.headers = [('x-ms-meta-name-values', x_ms_meta_name_values)]
+        request.path, request.query = _update_request_uri_query_local_storage(
+            request, self.use_local_storage)
+        request.headers = _update_storage_queue_header(
+            request, self.account_name, self.account_key)
+        if not fail_on_exist:
+            try:
+                response = self._perform_request(request)
+                if response.status == HTTP_RESPONSE_NO_CONTENT:
+                    return False
+                return True
+            except WindowsAzureError as ex:
+                _dont_fail_on_exist(ex)
+                return False
+        else:
+            response = self._perform_request(request)
+            if response.status == HTTP_RESPONSE_NO_CONTENT:
+                raise WindowsAzureConflictError(
+                    _ERROR_CONFLICT.format(response.message))
+            return True
+
+    def delete_queue(self, queue_name, fail_not_exist=False):
+        '''
+        Permanently deletes the specified queue.
+
+        queue_name: Name of the queue.
+        fail_not_exist:
+            Specify whether to throw an exception when the queue doesn't
+            exist.
+        '''
+        _validate_not_none('queue_name', queue_name)
+        request = HTTPRequest()
+        request.method = 'DELETE'
+        request.host = self._get_host()
+        request.path = '/' + _str(queue_name) + ''
+        request.path, request.query = _update_request_uri_query_local_storage(
+            request, self.use_local_storage)
+        request.headers = _update_storage_queue_header(
+            request, self.account_name, self.account_key)
+        if not fail_not_exist:
+            try:
+                self._perform_request(request)
+                return True
+            except WindowsAzureError as ex:
+                _dont_fail_not_exist(ex)
+                return False
+        else:
+            self._perform_request(request)
+            return True
+
+    def get_queue_metadata(self, queue_name):
+        '''
+        Retrieves user-defined metadata and queue properties on the specified
+        queue. Metadata is associated with the queue as name-value pairs.
+
+        queue_name: Name of the queue.
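+
+        Example -- an illustrative sketch; the credentials and 'taskqueue'
+        are placeholders for an existing queue:
+            qs = QueueService(account_name='myaccount', account_key='mykey')
+            metadata = qs.get_queue_metadata('taskqueue')
+            # The approximate queue depth is returned alongside the
+            # x-ms-meta values.
+            print(metadata.get('x-ms-approximate-messages-count'))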
+ ''' + _validate_not_none('queue_name', queue_name) + request = HTTPRequest() + request.method = 'GET' + request.host = self._get_host() + request.path = '/' + _str(queue_name) + '?comp=metadata' + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_queue_header( + request, self.account_name, self.account_key) + response = self._perform_request(request) + + return _parse_response_for_dict_prefix( + response, + prefixes=['x-ms-meta', 'x-ms-approximate-messages-count']) + + def set_queue_metadata(self, queue_name, x_ms_meta_name_values=None): + ''' + Sets user-defined metadata on the specified queue. Metadata is + associated with the queue as name-value pairs. + + queue_name: Name of the queue. + x_ms_meta_name_values: + Optional. A dict containing name-value pairs to associate with the + queue as metadata. + ''' + _validate_not_none('queue_name', queue_name) + request = HTTPRequest() + request.method = 'PUT' + request.host = self._get_host() + request.path = '/' + _str(queue_name) + '?comp=metadata' + request.headers = [('x-ms-meta-name-values', x_ms_meta_name_values)] + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_queue_header( + request, self.account_name, self.account_key) + self._perform_request(request) + + def put_message(self, queue_name, message_text, visibilitytimeout=None, + messagettl=None): + ''' + Adds a new message to the back of the message queue. A visibility + timeout can also be specified to make the message invisible until the + visibility timeout expires. A message must be in a format that can be + included in an XML request with UTF-8 encoding. The encoded message can + be up to 64KB in size for versions 2011-08-18 and newer, or 8KB in size + for previous versions. + + queue_name: Name of the queue. + message_text: Message content. + visibilitytimeout: + Optional. If not specified, the default value is 0. Specifies the + new visibility timeout value, in seconds, relative to server time. + The new value must be larger than or equal to 0, and cannot be + larger than 7 days. The visibility timeout of a message cannot be + set to a value later than the expiry time. visibilitytimeout + should be set to a value smaller than the time-to-live value. + messagettl: + Optional. Specifies the time-to-live interval for the message, in + seconds. The maximum time-to-live allowed is 7 days. If this + parameter is omitted, the default time-to-live is 7 days. + ''' + _validate_not_none('queue_name', queue_name) + _validate_not_none('message_text', message_text) + request = HTTPRequest() + request.method = 'POST' + request.host = self._get_host() + request.path = '/' + _str(queue_name) + '/messages' + request.query = [ + ('visibilitytimeout', _str_or_none(visibilitytimeout)), + ('messagettl', _str_or_none(messagettl)) + ] + request.body = _get_request_body( + '<?xml version="1.0" encoding="utf-8"?> \ +<QueueMessage> \ + <MessageText>' + xml_escape(_str(message_text)) + '</MessageText> \ +</QueueMessage>') + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_queue_header( + request, self.account_name, self.account_key) + self._perform_request(request) + + def get_messages(self, queue_name, numofmessages=None, + visibilitytimeout=None): + ''' + Retrieves one or more messages from the front of the queue. 
+ + queue_name: Name of the queue. + numofmessages: + Optional. A nonzero integer value that specifies the number of + messages to retrieve from the queue, up to a maximum of 32. If + fewer are visible, the visible messages are returned. By default, + a single message is retrieved from the queue with this operation. + visibilitytimeout: + Specifies the new visibility timeout value, in seconds, relative + to server time. The new value must be larger than or equal to 1 + second, and cannot be larger than 7 days, or larger than 2 hours + on REST protocol versions prior to version 2011-08-18. The + visibility timeout of a message can be set to a value later than + the expiry time. + ''' + _validate_not_none('queue_name', queue_name) + request = HTTPRequest() + request.method = 'GET' + request.host = self._get_host() + request.path = '/' + _str(queue_name) + '/messages' + request.query = [ + ('numofmessages', _str_or_none(numofmessages)), + ('visibilitytimeout', _str_or_none(visibilitytimeout)) + ] + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_queue_header( + request, self.account_name, self.account_key) + response = self._perform_request(request) + + return _parse_response(response, QueueMessagesList) + + def peek_messages(self, queue_name, numofmessages=None): + ''' + Retrieves one or more messages from the front of the queue, but does + not alter the visibility of the message. + + queue_name: Name of the queue. + numofmessages: + Optional. A nonzero integer value that specifies the number of + messages to peek from the queue, up to a maximum of 32. By default, + a single message is peeked from the queue with this operation. + ''' + _validate_not_none('queue_name', queue_name) + request = HTTPRequest() + request.method = 'GET' + request.host = self._get_host() + request.path = '/' + _str(queue_name) + '/messages?peekonly=true' + request.query = [('numofmessages', _str_or_none(numofmessages))] + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_queue_header( + request, self.account_name, self.account_key) + response = self._perform_request(request) + + return _parse_response(response, QueueMessagesList) + + def delete_message(self, queue_name, message_id, popreceipt): + ''' + Deletes the specified message. + + queue_name: Name of the queue. + message_id: Message to delete. + popreceipt: + Required. A valid pop receipt value returned from an earlier call + to the Get Messages or Update Message operation. + ''' + _validate_not_none('queue_name', queue_name) + _validate_not_none('message_id', message_id) + _validate_not_none('popreceipt', popreceipt) + request = HTTPRequest() + request.method = 'DELETE' + request.host = self._get_host() + request.path = '/' + \ + _str(queue_name) + '/messages/' + _str(message_id) + '' + request.query = [('popreceipt', _str_or_none(popreceipt))] + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_queue_header( + request, self.account_name, self.account_key) + self._perform_request(request) + + def clear_messages(self, queue_name): + ''' + Deletes all messages from the specified queue. + + queue_name: Name of the queue. 
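+
+        Example -- an illustrative sketch; the credentials and 'taskqueue'
+        are placeholders for an existing queue whose pending messages should
+        all be discarded:
+            qs = QueueService(account_name='myaccount', account_key='mykey')
+            qs.clear_messages('taskqueue')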
+ ''' + _validate_not_none('queue_name', queue_name) + request = HTTPRequest() + request.method = 'DELETE' + request.host = self._get_host() + request.path = '/' + _str(queue_name) + '/messages' + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_queue_header( + request, self.account_name, self.account_key) + self._perform_request(request) + + def update_message(self, queue_name, message_id, message_text, popreceipt, + visibilitytimeout): + ''' + Updates the visibility timeout of a message. You can also use this + operation to update the contents of a message. + + queue_name: Name of the queue. + message_id: Message to update. + message_text: Content of message. + popreceipt: + Required. A valid pop receipt value returned from an earlier call + to the Get Messages or Update Message operation. + visibilitytimeout: + Required. Specifies the new visibility timeout value, in seconds, + relative to server time. The new value must be larger than or equal + to 0, and cannot be larger than 7 days. The visibility timeout of a + message cannot be set to a value later than the expiry time. A + message can be updated until it has been deleted or has expired. + ''' + _validate_not_none('queue_name', queue_name) + _validate_not_none('message_id', message_id) + _validate_not_none('message_text', message_text) + _validate_not_none('popreceipt', popreceipt) + _validate_not_none('visibilitytimeout', visibilitytimeout) + request = HTTPRequest() + request.method = 'PUT' + request.host = self._get_host() + request.path = '/' + \ + _str(queue_name) + '/messages/' + _str(message_id) + '' + request.query = [ + ('popreceipt', _str_or_none(popreceipt)), + ('visibilitytimeout', _str_or_none(visibilitytimeout)) + ] + request.body = _get_request_body( + '<?xml version="1.0" encoding="utf-8"?> \ +<QueueMessage> \ + <MessageText>' + xml_escape(_str(message_text)) + '</MessageText> \ +</QueueMessage>') + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_queue_header( + request, self.account_name, self.account_key) + response = self._perform_request(request) + + return _parse_response_for_dict_filter( + response, + filter=['x-ms-popreceipt', 'x-ms-time-next-visible']) + + def set_queue_service_properties(self, storage_service_properties, + timeout=None): + ''' + Sets the properties of a storage account's Queue service, including + Windows Azure Storage Analytics. + + storage_service_properties: StorageServiceProperties object. + timeout: Optional. The timeout parameter is expressed in seconds. 
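+
+        Example -- an illustrative sketch that enables read logging; the
+        logging.read attribute is assumed from the StorageServiceProperties
+        object returned by get_queue_service_properties:
+            qs = QueueService(account_name='myaccount', account_key='mykey')
+            props = qs.get_queue_service_properties()
+            props.logging.read = True
+            qs.set_queue_service_properties(props)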
+ ''' + _validate_not_none('storage_service_properties', + storage_service_properties) + request = HTTPRequest() + request.method = 'PUT' + request.host = self._get_host() + request.path = '/?restype=service&comp=properties' + request.query = [('timeout', _int_or_none(timeout))] + request.body = _get_request_body( + _convert_class_to_xml(storage_service_properties)) + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_queue_header( + request, self.account_name, self.account_key) + self._perform_request(request) diff --git a/awx/lib/site-packages/azure/storage/sharedaccesssignature.py b/awx/lib/site-packages/azure/storage/sharedaccesssignature.py index a882461cd4..a9ec14cc70 100644 --- a/awx/lib/site-packages/azure/storage/sharedaccesssignature.py +++ b/awx/lib/site-packages/azure/storage/sharedaccesssignature.py @@ -1,230 +1,231 @@ -#------------------------------------------------------------------------- -# Copyright (c) Microsoft. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#-------------------------------------------------------------------------- -from azure import url_quote -from azure.storage import _sign_string, X_MS_VERSION - -#------------------------------------------------------------------------- -# Constants for the share access signature -SIGNED_START = 'st' -SIGNED_EXPIRY = 'se' -SIGNED_RESOURCE = 'sr' -SIGNED_PERMISSION = 'sp' -SIGNED_IDENTIFIER = 'si' -SIGNED_SIGNATURE = 'sig' -SIGNED_VERSION = 'sv' -RESOURCE_BLOB = 'b' -RESOURCE_CONTAINER = 'c' -SIGNED_RESOURCE_TYPE = 'resource' -SHARED_ACCESS_PERMISSION = 'permission' - -#-------------------------------------------------------------------------- - - -class WebResource(object): - - ''' - Class that stands for the resource to get the share access signature - - path: the resource path. - properties: dict of name and values. Contains 2 item: resource type and - permission - request_url: the url of the webresource include all the queries. - ''' - - def __init__(self, path=None, request_url=None, properties=None): - self.path = path - self.properties = properties or {} - self.request_url = request_url - - -class Permission(object): - - ''' - Permission class. Contains the path and query_string for the path. - - path: the resource path - query_string: dict of name, values. Contains SIGNED_START, SIGNED_EXPIRY - SIGNED_RESOURCE, SIGNED_PERMISSION, SIGNED_IDENTIFIER, - SIGNED_SIGNATURE name values. - ''' - - def __init__(self, path=None, query_string=None): - self.path = path - self.query_string = query_string - - -class SharedAccessPolicy(object): - - ''' SharedAccessPolicy class. ''' - - def __init__(self, access_policy, signed_identifier=None): - self.id = signed_identifier - self.access_policy = access_policy - - -class SharedAccessSignature(object): - - ''' - The main class used to do the signing and generating the signature. 
- - account_name: - the storage account name used to generate shared access signature - account_key: the access key to genenerate share access signature - permission_set: the permission cache used to signed the request url. - ''' - - def __init__(self, account_name, account_key, permission_set=None): - self.account_name = account_name - self.account_key = account_key - self.permission_set = permission_set - - def generate_signed_query_string(self, path, resource_type, - shared_access_policy, - version=X_MS_VERSION): - ''' - Generates the query string for path, resource type and shared access - policy. - - path: the resource - resource_type: could be blob or container - shared_access_policy: shared access policy - version: - x-ms-version for storage service, or None to get a signed query - string compatible with pre 2012-02-12 clients, where the version - is not included in the query string. - ''' - - query_string = {} - if shared_access_policy.access_policy.start: - query_string[ - SIGNED_START] = shared_access_policy.access_policy.start - - if version: - query_string[SIGNED_VERSION] = version - query_string[SIGNED_EXPIRY] = shared_access_policy.access_policy.expiry - query_string[SIGNED_RESOURCE] = resource_type - query_string[ - SIGNED_PERMISSION] = shared_access_policy.access_policy.permission - - if shared_access_policy.id: - query_string[SIGNED_IDENTIFIER] = shared_access_policy.id - - query_string[SIGNED_SIGNATURE] = self._generate_signature( - path, shared_access_policy, version) - return query_string - - def sign_request(self, web_resource): - ''' sign request to generate request_url with sharedaccesssignature - info for web_resource.''' - - if self.permission_set: - for shared_access_signature in self.permission_set: - if self._permission_matches_request( - shared_access_signature, web_resource, - web_resource.properties[ - SIGNED_RESOURCE_TYPE], - web_resource.properties[SHARED_ACCESS_PERMISSION]): - if web_resource.request_url.find('?') == -1: - web_resource.request_url += '?' - else: - web_resource.request_url += '&' - - web_resource.request_url += self._convert_query_string( - shared_access_signature.query_string) - break - return web_resource - - def _convert_query_string(self, query_string): - ''' Converts query string to str. The order of name, values is very - important and can't be wrong.''' - - convert_str = '' - if SIGNED_START in query_string: - convert_str += SIGNED_START + '=' + \ - url_quote(query_string[SIGNED_START]) + '&' - convert_str += SIGNED_EXPIRY + '=' + \ - url_quote(query_string[SIGNED_EXPIRY]) + '&' - convert_str += SIGNED_PERMISSION + '=' + \ - query_string[SIGNED_PERMISSION] + '&' - convert_str += SIGNED_RESOURCE + '=' + \ - query_string[SIGNED_RESOURCE] + '&' - - if SIGNED_IDENTIFIER in query_string: - convert_str += SIGNED_IDENTIFIER + '=' + \ - query_string[SIGNED_IDENTIFIER] + '&' - if SIGNED_VERSION in query_string: - convert_str += SIGNED_VERSION + '=' + \ - query_string[SIGNED_VERSION] + '&' - convert_str += SIGNED_SIGNATURE + '=' + \ - url_quote(query_string[SIGNED_SIGNATURE]) + '&' - return convert_str - - def _generate_signature(self, path, shared_access_policy, version): - ''' Generates signature for a given path and shared access policy. 
'''
-
-        def get_value_to_append(value, no_new_line=False):
-            return_value = ''
-            if value:
-                return_value = value
-            if not no_new_line:
-                return_value += '\n'
-            return return_value
-
-        if path[0] != '/':
-            path = '/' + path
-
-        canonicalized_resource = '/' + self.account_name + path
-
-        # Form the string to sign from shared_access_policy and canonicalized
-        # resource. The order of values is important.
-        string_to_sign = \
-            (get_value_to_append(shared_access_policy.access_policy.permission) +
-             get_value_to_append(shared_access_policy.access_policy.start) +
-             get_value_to_append(shared_access_policy.access_policy.expiry) +
-             get_value_to_append(canonicalized_resource))
-
-        if version:
-            string_to_sign += get_value_to_append(shared_access_policy.id)
-            string_to_sign += get_value_to_append(version, True)
-        else:
-            string_to_sign += get_value_to_append(shared_access_policy.id, True)
-
-        return self._sign(string_to_sign)
-
-    def _permission_matches_request(self, shared_access_signature,
-                                    web_resource, resource_type,
-                                    required_permission):
-        ''' Check whether requested permission matches given
-        shared_access_signature, web_resource and resource type. '''
-
-        required_resource_type = resource_type
-        if required_resource_type == RESOURCE_BLOB:
-            required_resource_type += RESOURCE_CONTAINER
-
-        for name, value in shared_access_signature.query_string.items():
-            if name == SIGNED_RESOURCE and \
-                    required_resource_type.find(value) == -1:
-                return False
-            elif name == SIGNED_PERMISSION and \
-                    required_permission.find(value) == -1:
-                return False
-
-        return web_resource.path.find(shared_access_signature.path) != -1
-
-    def _sign(self, string_to_sign):
-        ''' use HMAC-SHA256 to sign the string and convert it as base64
-        encoded string. '''
-
-        return _sign_string(self.account_key, string_to_sign)
+#-------------------------------------------------------------------------
+# Copyright (c) Microsoft. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#--------------------------------------------------------------------------
+from azure import _sign_string, url_quote
+from azure.storage import X_MS_VERSION
+
+#-------------------------------------------------------------------------
+# Constants for the shared access signature
+SIGNED_START = 'st'
+SIGNED_EXPIRY = 'se'
+SIGNED_RESOURCE = 'sr'
+SIGNED_PERMISSION = 'sp'
+SIGNED_IDENTIFIER = 'si'
+SIGNED_SIGNATURE = 'sig'
+SIGNED_VERSION = 'sv'
+RESOURCE_BLOB = 'b'
+RESOURCE_CONTAINER = 'c'
+SIGNED_RESOURCE_TYPE = 'resource'
+SHARED_ACCESS_PERMISSION = 'permission'
+
+#--------------------------------------------------------------------------
+
+
+class WebResource(object):
+
+    '''
+    Class that stands for the resource to get the shared access signature.
+
+    path: the resource path.
+    properties: dict of names and values. Contains 2 items: resource type and
+        permission.
+    request_url: the url of the web resource, including all the queries.
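+
+    Example -- an illustrative sketch of the two expected property keys
+    ('resource' and 'permission'); the path and url are placeholders:
+        res = WebResource(
+            path='/mycontainer/myblob',
+            request_url='https://myaccount.blob.core.windows.net'
+                        '/mycontainer/myblob',
+            properties={'resource': 'b', 'permission': 'r'})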
+    '''
+
+    def __init__(self, path=None, request_url=None, properties=None):
+        self.path = path
+        self.properties = properties or {}
+        self.request_url = request_url
+
+
+class Permission(object):
+
+    '''
+    Permission class. Contains the path and query_string for the path.
+
+    path: the resource path
+    query_string: dict of names and values. Contains SIGNED_START,
+        SIGNED_EXPIRY, SIGNED_RESOURCE, SIGNED_PERMISSION, SIGNED_IDENTIFIER,
+        SIGNED_SIGNATURE name values.
+    '''
+
+    def __init__(self, path=None, query_string=None):
+        self.path = path
+        self.query_string = query_string
+
+
+class SharedAccessPolicy(object):
+
+    ''' SharedAccessPolicy class. '''
+
+    def __init__(self, access_policy, signed_identifier=None):
+        self.id = signed_identifier
+        self.access_policy = access_policy
+
+
+class SharedAccessSignature(object):
+
+    '''
+    The main class used to do the signing and generate the signature.
+
+    account_name:
+        the storage account name used to generate the shared access signature
+    account_key: the access key to generate the shared access signature
+    permission_set: the permission cache used to sign the request url.
+    '''
+
+    def __init__(self, account_name, account_key, permission_set=None):
+        self.account_name = account_name
+        self.account_key = account_key
+        self.permission_set = permission_set
+
+    def generate_signed_query_string(self, path, resource_type,
+                                     shared_access_policy,
+                                     version=X_MS_VERSION):
+        '''
+        Generates the query string for path, resource type and shared access
+        policy.
+
+        path: the resource
+        resource_type: could be blob or container
+        shared_access_policy: shared access policy
+        version:
+            x-ms-version for storage service, or None to get a signed query
+            string compatible with pre 2012-02-12 clients, where the version
+            is not included in the query string.
+        '''
+
+        query_string = {}
+        if shared_access_policy.access_policy.start:
+            query_string[
+                SIGNED_START] = shared_access_policy.access_policy.start
+
+        if version:
+            query_string[SIGNED_VERSION] = version
+        query_string[SIGNED_EXPIRY] = shared_access_policy.access_policy.expiry
+        query_string[SIGNED_RESOURCE] = resource_type
+        query_string[
+            SIGNED_PERMISSION] = shared_access_policy.access_policy.permission
+
+        if shared_access_policy.id:
+            query_string[SIGNED_IDENTIFIER] = shared_access_policy.id
+
+        query_string[SIGNED_SIGNATURE] = self._generate_signature(
+            path, shared_access_policy, version)
+        return query_string
+
+    def sign_request(self, web_resource):
+        ''' Signs the request to generate a request_url with shared access
+        signature info for web_resource. '''
+
+        if self.permission_set:
+            for shared_access_signature in self.permission_set:
+                if self._permission_matches_request(
+                        shared_access_signature, web_resource,
+                        web_resource.properties[
+                            SIGNED_RESOURCE_TYPE],
+                        web_resource.properties[SHARED_ACCESS_PERMISSION]):
+                    if web_resource.request_url.find('?') == -1:
+                        web_resource.request_url += '?'
+                    else:
+                        web_resource.request_url += '&'
+
+                    web_resource.request_url += self._convert_query_string(
+                        shared_access_signature.query_string)
+                    break
+        return web_resource
+
+    def _convert_query_string(self, query_string):
+        ''' Converts the query string dict to a str. The order of the
+        name/value pairs is significant and must be preserved. '''
+
+        convert_str = ''
+        if SIGNED_START in query_string:
+            convert_str += SIGNED_START + '=' + \
+                url_quote(query_string[SIGNED_START]) + '&'
+        convert_str += SIGNED_EXPIRY + '=' + \
+            url_quote(query_string[SIGNED_EXPIRY]) + '&'
+        convert_str += SIGNED_PERMISSION + '=' + \
+            query_string[SIGNED_PERMISSION] + '&'
+        convert_str += SIGNED_RESOURCE + '=' + \
+            query_string[SIGNED_RESOURCE] + '&'
+
+        if SIGNED_IDENTIFIER in query_string:
+            convert_str += SIGNED_IDENTIFIER + '=' + \
+                query_string[SIGNED_IDENTIFIER] + '&'
+        if SIGNED_VERSION in query_string:
+            convert_str += SIGNED_VERSION + '=' + \
+                query_string[SIGNED_VERSION] + '&'
+        convert_str += SIGNED_SIGNATURE + '=' + \
+            url_quote(query_string[SIGNED_SIGNATURE]) + '&'
+        return convert_str
+
+    def _generate_signature(self, path, shared_access_policy, version):
+        ''' Generates signature for a given path and shared access policy. '''
+
+        def get_value_to_append(value, no_new_line=False):
+            return_value = ''
+            if value:
+                return_value = value
+            if not no_new_line:
+                return_value += '\n'
+            return return_value
+
+        if path[0] != '/':
+            path = '/' + path
+
+        canonicalized_resource = '/' + self.account_name + path
+
+        # Form the string to sign from shared_access_policy and canonicalized
+        # resource. The order of values is important.
+        string_to_sign = \
+            (get_value_to_append(shared_access_policy.access_policy.permission) +
+             get_value_to_append(shared_access_policy.access_policy.start) +
+             get_value_to_append(shared_access_policy.access_policy.expiry) +
+             get_value_to_append(canonicalized_resource))
+
+        if version:
+            string_to_sign += get_value_to_append(shared_access_policy.id)
+            string_to_sign += get_value_to_append(version, True)
+        else:
+            string_to_sign += get_value_to_append(shared_access_policy.id, True)
+
+        return self._sign(string_to_sign)
+
+    def _permission_matches_request(self, shared_access_signature,
+                                    web_resource, resource_type,
+                                    required_permission):
+        ''' Check whether requested permission matches given
+        shared_access_signature, web_resource and resource type. '''
+
+        required_resource_type = resource_type
+        if required_resource_type == RESOURCE_BLOB:
+            required_resource_type += RESOURCE_CONTAINER
+
+        for name, value in shared_access_signature.query_string.items():
+            if name == SIGNED_RESOURCE and \
+                    required_resource_type.find(value) == -1:
+                return False
+            elif name == SIGNED_PERMISSION and \
+                    required_permission.find(value) == -1:
+                return False
+
+        return web_resource.path.find(shared_access_signature.path) != -1
+
+    def _sign(self, string_to_sign):
+        ''' Signs the string with HMAC-SHA256 and returns the result as a
+        base64-encoded string. '''
+
+        return _sign_string(self.account_key, string_to_sign)
diff --git a/awx/lib/site-packages/azure/storage/storageclient.py b/awx/lib/site-packages/azure/storage/storageclient.py
index 7f160faff9..9b0fd56c4b 100644
--- a/awx/lib/site-packages/azure/storage/storageclient.py
+++ b/awx/lib/site-packages/azure/storage/storageclient.py
@@ -1,152 +1,152 @@
-#-------------------------------------------------------------------------
-# Copyright (c) Microsoft. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#-------------------------------------------------------------------------- -import os -import sys - -from azure import ( - WindowsAzureError, - DEV_ACCOUNT_NAME, - DEV_ACCOUNT_KEY, - _ERROR_STORAGE_MISSING_INFO, - ) -from azure.http import HTTPError -from azure.http.httpclient import _HTTPClient -from azure.storage import _storage_error_handler - -#-------------------------------------------------------------------------- -# constants for azure app setting environment variables -AZURE_STORAGE_ACCOUNT = 'AZURE_STORAGE_ACCOUNT' -AZURE_STORAGE_ACCESS_KEY = 'AZURE_STORAGE_ACCESS_KEY' -EMULATED = 'EMULATED' - -#-------------------------------------------------------------------------- - - -class _StorageClient(object): - - ''' - This is the base class for BlobManager, TableManager and QueueManager. - ''' - - def __init__(self, account_name=None, account_key=None, protocol='https', - host_base='', dev_host=''): - ''' - account_name: your storage account name, required for all operations. - account_key: your storage account key, required for all operations. - protocol: Optional. Protocol. Defaults to http. - host_base: - Optional. Live host base url. Defaults to Azure url. Override this - for on-premise. - dev_host: Optional. Dev host url. Defaults to localhost. - ''' - self.account_name = account_name - self.account_key = account_key - self.requestid = None - self.protocol = protocol - self.host_base = host_base - self.dev_host = dev_host - - # the app is not run in azure emulator or use default development - # storage account and key if app is run in emulator. - self.use_local_storage = False - - # check whether it is run in emulator. - if EMULATED in os.environ: - self.is_emulated = os.environ[EMULATED].lower() != 'false' - else: - self.is_emulated = False - - # get account_name and account key. If they are not set when - # constructing, get the account and key from environment variables if - # the app is not run in azure emulator or use default development - # storage account and key if app is run in emulator. - if not self.account_name or not self.account_key: - if self.is_emulated: - self.account_name = DEV_ACCOUNT_NAME - self.account_key = DEV_ACCOUNT_KEY - self.protocol = 'http' - self.use_local_storage = True - else: - self.account_name = os.environ.get(AZURE_STORAGE_ACCOUNT) - self.account_key = os.environ.get(AZURE_STORAGE_ACCESS_KEY) - - if not self.account_name or not self.account_key: - raise WindowsAzureError(_ERROR_STORAGE_MISSING_INFO) - - self._httpclient = _HTTPClient( - service_instance=self, - account_key=self.account_key, - account_name=self.account_name, - protocol=self.protocol) - self._batchclient = None - self._filter = self._perform_request_worker - - def with_filter(self, filter): - ''' - Returns a new service which will process requests with the specified - filter. Filtering operations can include logging, automatic retrying, - etc... The filter is a lambda which receives the HTTPRequest and - another lambda. The filter can perform any pre-processing on the - request, pass it off to the next lambda, and then perform any - post-processing on the response. 
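# Illustrative sketch, not part of the patch: the filter protocol the
# with_filter docstring above describes. A filter is a callable taking the
# outgoing HTTPRequest plus the next callable in the chain; it may
# pre-process the request and post-process the response. 'log_filter' and
# the account values are hypothetical.
from azure.storage.tableservice import TableService

def log_filter(request, next_filter):
    print('>>', request.method, request.path)       # pre-processing
    response = next_filter(request)                 # forward down the chain
    print('<<', getattr(response, 'status', None))  # post-processing
    return response

svc = TableService('myaccount', 'mykey').with_filter(log_filter)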
- ''' - res = type(self)(self.account_name, self.account_key, self.protocol) - old_filter = self._filter - - def new_filter(request): - return filter(request, old_filter) - - res._filter = new_filter - return res - - def set_proxy(self, host, port, user=None, password=None): - ''' - Sets the proxy server host and port for the HTTP CONNECT Tunnelling. - - host: Address of the proxy. Ex: '192.168.0.100' - port: Port of the proxy. Ex: 6000 - user: User for proxy authorization. - password: Password for proxy authorization. - ''' - self._httpclient.set_proxy(host, port, user, password) - - def _get_host(self): - if self.use_local_storage: - return self.dev_host - else: - return self.account_name + self.host_base - - def _perform_request_worker(self, request): - return self._httpclient.perform_request(request) - - def _perform_request(self, request, text_encoding='utf-8'): - ''' - Sends the request and return response. Catches HTTPError and hand it - to error handler - ''' - try: - if self._batchclient is not None: - return self._batchclient.insert_request_to_batch(request) - else: - resp = self._filter(request) - - if sys.version_info >= (3,) and isinstance(resp, bytes) and \ - text_encoding: - resp = resp.decode(text_encoding) - - except HTTPError as ex: - _storage_error_handler(ex) - - return resp +#------------------------------------------------------------------------- +# Copyright (c) Microsoft. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#-------------------------------------------------------------------------- +import os +import sys + +from azure import ( + WindowsAzureError, + DEV_ACCOUNT_NAME, + DEV_ACCOUNT_KEY, + _ERROR_STORAGE_MISSING_INFO, + ) +from azure.http import HTTPError +from azure.http.httpclient import _HTTPClient +from azure.storage import _storage_error_handler + +#-------------------------------------------------------------------------- +# constants for azure app setting environment variables +AZURE_STORAGE_ACCOUNT = 'AZURE_STORAGE_ACCOUNT' +AZURE_STORAGE_ACCESS_KEY = 'AZURE_STORAGE_ACCESS_KEY' +EMULATED = 'EMULATED' + +#-------------------------------------------------------------------------- + + +class _StorageClient(object): + + ''' + This is the base class for BlobManager, TableManager and QueueManager. + ''' + + def __init__(self, account_name=None, account_key=None, protocol='https', + host_base='', dev_host=''): + ''' + account_name: your storage account name, required for all operations. + account_key: your storage account key, required for all operations. + protocol: Optional. Protocol. Defaults to http. + host_base: + Optional. Live host base url. Defaults to Azure url. Override this + for on-premise. + dev_host: Optional. Dev host url. Defaults to localhost. 
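# Illustrative sketch of the three credential paths the constructor here
# resolves: explicit arguments, the AZURE_STORAGE_* environment variables,
# or the development-storage defaults when EMULATED is set. Account values
# are hypothetical.
import os
from azure.storage.tableservice import TableService

svc = TableService('myaccount', 'mykey')           # explicit credentials

os.environ['AZURE_STORAGE_ACCOUNT'] = 'myaccount'
os.environ['AZURE_STORAGE_ACCESS_KEY'] = 'mykey'
svc = TableService()                               # read from the environment

os.environ['EMULATED'] = 'true'
svc = TableService()                               # devstoreaccount1 over http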
+ ''' + self.account_name = account_name + self.account_key = account_key + self.requestid = None + self.protocol = protocol + self.host_base = host_base + self.dev_host = dev_host + + # the app is not run in azure emulator or use default development + # storage account and key if app is run in emulator. + self.use_local_storage = False + + # check whether it is run in emulator. + if EMULATED in os.environ: + self.is_emulated = os.environ[EMULATED].lower() != 'false' + else: + self.is_emulated = False + + # get account_name and account key. If they are not set when + # constructing, get the account and key from environment variables if + # the app is not run in azure emulator or use default development + # storage account and key if app is run in emulator. + if not self.account_name or not self.account_key: + if self.is_emulated: + self.account_name = DEV_ACCOUNT_NAME + self.account_key = DEV_ACCOUNT_KEY + self.protocol = 'http' + self.use_local_storage = True + else: + self.account_name = os.environ.get(AZURE_STORAGE_ACCOUNT) + self.account_key = os.environ.get(AZURE_STORAGE_ACCESS_KEY) + + if not self.account_name or not self.account_key: + raise WindowsAzureError(_ERROR_STORAGE_MISSING_INFO) + + self._httpclient = _HTTPClient( + service_instance=self, + account_key=self.account_key, + account_name=self.account_name, + protocol=self.protocol) + self._batchclient = None + self._filter = self._perform_request_worker + + def with_filter(self, filter): + ''' + Returns a new service which will process requests with the specified + filter. Filtering operations can include logging, automatic retrying, + etc... The filter is a lambda which receives the HTTPRequest and + another lambda. The filter can perform any pre-processing on the + request, pass it off to the next lambda, and then perform any + post-processing on the response. + ''' + res = type(self)(self.account_name, self.account_key, self.protocol) + old_filter = self._filter + + def new_filter(request): + return filter(request, old_filter) + + res._filter = new_filter + return res + + def set_proxy(self, host, port, user=None, password=None): + ''' + Sets the proxy server host and port for the HTTP CONNECT Tunnelling. + + host: Address of the proxy. Ex: '192.168.0.100' + port: Port of the proxy. Ex: 6000 + user: User for proxy authorization. + password: Password for proxy authorization. + ''' + self._httpclient.set_proxy(host, port, user, password) + + def _get_host(self): + if self.use_local_storage: + return self.dev_host + else: + return self.account_name + self.host_base + + def _perform_request_worker(self, request): + return self._httpclient.perform_request(request) + + def _perform_request(self, request, text_encoding='utf-8'): + ''' + Sends the request and return response. 
Catches HTTPError and hand it + to error handler + ''' + try: + if self._batchclient is not None: + return self._batchclient.insert_request_to_batch(request) + else: + resp = self._filter(request) + + if sys.version_info >= (3,) and isinstance(resp, bytes) and \ + text_encoding: + resp = resp.decode(text_encoding) + + except HTTPError as ex: + _storage_error_handler(ex) + + return resp diff --git a/awx/lib/site-packages/azure/storage/tableservice.py b/awx/lib/site-packages/azure/storage/tableservice.py index 3fe58a73f1..f25c48a5c8 100644 --- a/awx/lib/site-packages/azure/storage/tableservice.py +++ b/awx/lib/site-packages/azure/storage/tableservice.py @@ -1,491 +1,491 @@ -#------------------------------------------------------------------------- -# Copyright (c) Microsoft. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#-------------------------------------------------------------------------- -from azure import ( - WindowsAzureError, - TABLE_SERVICE_HOST_BASE, - DEV_TABLE_HOST, - _convert_class_to_xml, - _convert_response_to_feeds, - _dont_fail_not_exist, - _dont_fail_on_exist, - _get_request_body, - _int_or_none, - _parse_response, - _parse_response_for_dict, - _parse_response_for_dict_filter, - _str, - _str_or_none, - _update_request_uri_query_local_storage, - _validate_not_none, - ) -from azure.http import HTTPRequest -from azure.http.batchclient import _BatchClient -from azure.storage import ( - StorageServiceProperties, - _convert_entity_to_xml, - _convert_response_to_entity, - _convert_table_to_xml, - _convert_xml_to_entity, - _convert_xml_to_table, - _sign_storage_table_request, - _update_storage_table_header, - ) -from azure.storage.storageclient import _StorageClient - - -class TableService(_StorageClient): - - ''' - This is the main class managing Table resources. - ''' - - def __init__(self, account_name=None, account_key=None, protocol='https', - host_base=TABLE_SERVICE_HOST_BASE, dev_host=DEV_TABLE_HOST): - ''' - account_name: your storage account name, required for all operations. - account_key: your storage account key, required for all operations. - protocol: Optional. Protocol. Defaults to http. - host_base: - Optional. Live host base url. Defaults to Azure url. Override this - for on-premise. - dev_host: Optional. Dev host url. Defaults to localhost. - ''' - super(TableService, self).__init__( - account_name, account_key, protocol, host_base, dev_host) - - def begin_batch(self): - if self._batchclient is None: - self._batchclient = _BatchClient( - service_instance=self, - account_key=self.account_key, - account_name=self.account_name) - return self._batchclient.begin_batch() - - def commit_batch(self): - try: - ret = self._batchclient.commit_batch() - finally: - self._batchclient = None - return ret - - def cancel_batch(self): - self._batchclient = None - - def get_table_service_properties(self): - ''' - Gets the properties of a storage account's Table service, including - Windows Azure Storage Analytics. 
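# Illustrative sketch of the begin/commit/cancel batch protocol defined
# above; per this package's batch error messages, all queued entities must
# share one table and partition key. Table and entity values are
# hypothetical.
from azure.storage.tableservice import TableService

ts = TableService('myaccount', 'mykey')
ts.begin_batch()
try:
    ts.insert_entity('tasks', {'PartitionKey': 'p1', 'RowKey': '1', 'text': 'a'})
    ts.insert_entity('tasks', {'PartitionKey': 'p1', 'RowKey': '2', 'text': 'b'})
    ts.commit_batch()      # flushes every queued request as a single batch
except Exception:
    ts.cancel_batch()      # drops the queued requests
    raise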
- ''' - request = HTTPRequest() - request.method = 'GET' - request.host = self._get_host() - request.path = '/?restype=service&comp=properties' - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_table_header(request) - response = self._perform_request(request) - - return _parse_response(response, StorageServiceProperties) - - def set_table_service_properties(self, storage_service_properties): - ''' - Sets the properties of a storage account's Table Service, including - Windows Azure Storage Analytics. - - storage_service_properties: StorageServiceProperties object. - ''' - _validate_not_none('storage_service_properties', - storage_service_properties) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/?restype=service&comp=properties' - request.body = _get_request_body( - _convert_class_to_xml(storage_service_properties)) - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_table_header(request) - response = self._perform_request(request) - - return _parse_response_for_dict(response) - - def query_tables(self, table_name=None, top=None, next_table_name=None): - ''' - Returns a list of tables under the specified account. - - table_name: Optional. The specific table to query. - top: Optional. Maximum number of tables to return. - next_table_name: - Optional. When top is used, the next table name is stored in - result.x_ms_continuation['NextTableName'] - ''' - request = HTTPRequest() - request.method = 'GET' - request.host = self._get_host() - if table_name is not None: - uri_part_table_name = "('" + table_name + "')" - else: - uri_part_table_name = "" - request.path = '/Tables' + uri_part_table_name + '' - request.query = [ - ('$top', _int_or_none(top)), - ('NextTableName', _str_or_none(next_table_name)) - ] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_table_header(request) - response = self._perform_request(request) - - return _convert_response_to_feeds(response, _convert_xml_to_table) - - def create_table(self, table, fail_on_exist=False): - ''' - Creates a new table in the storage account. - - table: - Name of the table to create. Table name may contain only - alphanumeric characters and cannot begin with a numeric character. - It is case-insensitive and must be from 3 to 63 characters long. - fail_on_exist: Specify whether throw exception when table exists. - ''' - _validate_not_none('table', table) - request = HTTPRequest() - request.method = 'POST' - request.host = self._get_host() - request.path = '/Tables' - request.body = _get_request_body(_convert_table_to_xml(table)) - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_table_header(request) - if not fail_on_exist: - try: - self._perform_request(request) - return True - except WindowsAzureError as ex: - _dont_fail_on_exist(ex) - return False - else: - self._perform_request(request) - return True - - def delete_table(self, table_name, fail_not_exist=False): - ''' - table_name: Name of the table to delete. - fail_not_exist: - Specify whether throw exception when table doesn't exist. 
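# Illustrative sketch of the boolean convention documented above: with the
# fail_* flag left False, a conflicting or missing table is reported via the
# return value instead of an exception ('ts' as in the sketch above).
created = ts.create_table('tasks')             # True if created, False if it already existed
deleted = ts.delete_table('tasks')             # True if deleted, False if it did not exist
ts.create_table('tasks', fail_on_exist=True)   # raises WindowsAzureError on a conflict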
- ''' - _validate_not_none('table_name', table_name) - request = HTTPRequest() - request.method = 'DELETE' - request.host = self._get_host() - request.path = '/Tables(\'' + _str(table_name) + '\')' - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_table_header(request) - if not fail_not_exist: - try: - self._perform_request(request) - return True - except WindowsAzureError as ex: - _dont_fail_not_exist(ex) - return False - else: - self._perform_request(request) - return True - - def get_entity(self, table_name, partition_key, row_key, select=''): - ''' - Get an entity in a table; includes the $select options. - - partition_key: PartitionKey of the entity. - row_key: RowKey of the entity. - select: Property names to select. - ''' - _validate_not_none('table_name', table_name) - _validate_not_none('partition_key', partition_key) - _validate_not_none('row_key', row_key) - _validate_not_none('select', select) - request = HTTPRequest() - request.method = 'GET' - request.host = self._get_host() - request.path = '/' + _str(table_name) + \ - '(PartitionKey=\'' + _str(partition_key) + \ - '\',RowKey=\'' + \ - _str(row_key) + '\')?$select=' + \ - _str(select) + '' - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_table_header(request) - response = self._perform_request(request) - - return _convert_response_to_entity(response) - - def query_entities(self, table_name, filter=None, select=None, top=None, - next_partition_key=None, next_row_key=None): - ''' - Get entities in a table; includes the $filter and $select options. - - table_name: Table to query. - filter: - Optional. Filter as described at - http://msdn.microsoft.com/en-us/library/windowsazure/dd894031.aspx - select: Optional. Property names to select from the entities. - top: Optional. Maximum number of entities to return. - next_partition_key: - Optional. When top is used, the next partition key is stored in - result.x_ms_continuation['NextPartitionKey'] - next_row_key: - Optional. When top is used, the next partition key is stored in - result.x_ms_continuation['NextRowKey'] - ''' - _validate_not_none('table_name', table_name) - request = HTTPRequest() - request.method = 'GET' - request.host = self._get_host() - request.path = '/' + _str(table_name) + '()' - request.query = [ - ('$filter', _str_or_none(filter)), - ('$select', _str_or_none(select)), - ('$top', _int_or_none(top)), - ('NextPartitionKey', _str_or_none(next_partition_key)), - ('NextRowKey', _str_or_none(next_row_key)) - ] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_table_header(request) - response = self._perform_request(request) - - return _convert_response_to_feeds(response, _convert_xml_to_entity) - - def insert_entity(self, table_name, entity, - content_type='application/atom+xml'): - ''' - Inserts a new entity into a table. - - table_name: Table name. - entity: - Required. The entity object to insert. Could be a dict format or - entity object. - content_type: Required. 
Must be set to application/atom+xml - ''' - _validate_not_none('table_name', table_name) - _validate_not_none('entity', entity) - _validate_not_none('content_type', content_type) - request = HTTPRequest() - request.method = 'POST' - request.host = self._get_host() - request.path = '/' + _str(table_name) + '' - request.headers = [('Content-Type', _str_or_none(content_type))] - request.body = _get_request_body(_convert_entity_to_xml(entity)) - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_table_header(request) - response = self._perform_request(request) - - return _convert_response_to_entity(response) - - def update_entity(self, table_name, partition_key, row_key, entity, - content_type='application/atom+xml', if_match='*'): - ''' - Updates an existing entity in a table. The Update Entity operation - replaces the entire entity and can be used to remove properties. - - table_name: Table name. - partition_key: PartitionKey of the entity. - row_key: RowKey of the entity. - entity: - Required. The entity object to insert. Could be a dict format or - entity object. - content_type: Required. Must be set to application/atom+xml - if_match: - Optional. Specifies the condition for which the merge should be - performed. To force an unconditional merge, set to the wildcard - character (*). - ''' - _validate_not_none('table_name', table_name) - _validate_not_none('partition_key', partition_key) - _validate_not_none('row_key', row_key) - _validate_not_none('entity', entity) - _validate_not_none('content_type', content_type) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + \ - _str(table_name) + '(PartitionKey=\'' + \ - _str(partition_key) + '\',RowKey=\'' + _str(row_key) + '\')' - request.headers = [ - ('Content-Type', _str_or_none(content_type)), - ('If-Match', _str_or_none(if_match)) - ] - request.body = _get_request_body(_convert_entity_to_xml(entity)) - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_table_header(request) - response = self._perform_request(request) - - return _parse_response_for_dict_filter(response, filter=['etag']) - - def merge_entity(self, table_name, partition_key, row_key, entity, - content_type='application/atom+xml', if_match='*'): - ''' - Updates an existing entity by updating the entity's properties. This - operation does not replace the existing entity as the Update Entity - operation does. - - table_name: Table name. - partition_key: PartitionKey of the entity. - row_key: RowKey of the entity. - entity: - Required. The entity object to insert. Can be a dict format or - entity object. - content_type: Required. Must be set to application/atom+xml - if_match: - Optional. Specifies the condition for which the merge should be - performed. To force an unconditional merge, set to the wildcard - character (*). 
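# Illustrative sketch contrasting update_entity and merge_entity per the
# docstrings above: update replaces the whole entity, merge only overlays
# the supplied properties. Table and property names are hypothetical.
ts.insert_entity('tasks', {'PartitionKey': 'p1', 'RowKey': '1',
                           'text': 'draft', 'priority': 2})
ts.merge_entity('tasks', 'p1', '1',
                {'PartitionKey': 'p1', 'RowKey': '1', 'text': 'final'})
# -> 'priority' survives the merge
ts.update_entity('tasks', 'p1', '1',
                 {'PartitionKey': 'p1', 'RowKey': '1', 'text': 'final'})
# -> 'priority' is gone; the entity was replaced wholesale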
- ''' - _validate_not_none('table_name', table_name) - _validate_not_none('partition_key', partition_key) - _validate_not_none('row_key', row_key) - _validate_not_none('entity', entity) - _validate_not_none('content_type', content_type) - request = HTTPRequest() - request.method = 'MERGE' - request.host = self._get_host() - request.path = '/' + \ - _str(table_name) + '(PartitionKey=\'' + \ - _str(partition_key) + '\',RowKey=\'' + _str(row_key) + '\')' - request.headers = [ - ('Content-Type', _str_or_none(content_type)), - ('If-Match', _str_or_none(if_match)) - ] - request.body = _get_request_body(_convert_entity_to_xml(entity)) - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_table_header(request) - response = self._perform_request(request) - - return _parse_response_for_dict_filter(response, filter=['etag']) - - def delete_entity(self, table_name, partition_key, row_key, - content_type='application/atom+xml', if_match='*'): - ''' - Deletes an existing entity in a table. - - table_name: Table name. - partition_key: PartitionKey of the entity. - row_key: RowKey of the entity. - content_type: Required. Must be set to application/atom+xml - if_match: - Optional. Specifies the condition for which the delete should be - performed. To force an unconditional delete, set to the wildcard - character (*). - ''' - _validate_not_none('table_name', table_name) - _validate_not_none('partition_key', partition_key) - _validate_not_none('row_key', row_key) - _validate_not_none('content_type', content_type) - _validate_not_none('if_match', if_match) - request = HTTPRequest() - request.method = 'DELETE' - request.host = self._get_host() - request.path = '/' + \ - _str(table_name) + '(PartitionKey=\'' + \ - _str(partition_key) + '\',RowKey=\'' + _str(row_key) + '\')' - request.headers = [ - ('Content-Type', _str_or_none(content_type)), - ('If-Match', _str_or_none(if_match)) - ] - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_table_header(request) - self._perform_request(request) - - def insert_or_replace_entity(self, table_name, partition_key, row_key, - entity, content_type='application/atom+xml'): - ''' - Replaces an existing entity or inserts a new entity if it does not - exist in the table. Because this operation can insert or update an - entity, it is also known as an "upsert" operation. - - table_name: Table name. - partition_key: PartitionKey of the entity. - row_key: RowKey of the entity. - entity: - Required. The entity object to insert. Could be a dict format or - entity object. - content_type: Required. 
Must be set to application/atom+xml - ''' - _validate_not_none('table_name', table_name) - _validate_not_none('partition_key', partition_key) - _validate_not_none('row_key', row_key) - _validate_not_none('entity', entity) - _validate_not_none('content_type', content_type) - request = HTTPRequest() - request.method = 'PUT' - request.host = self._get_host() - request.path = '/' + \ - _str(table_name) + '(PartitionKey=\'' + \ - _str(partition_key) + '\',RowKey=\'' + _str(row_key) + '\')' - request.headers = [('Content-Type', _str_or_none(content_type))] - request.body = _get_request_body(_convert_entity_to_xml(entity)) - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_table_header(request) - response = self._perform_request(request) - - return _parse_response_for_dict_filter(response, filter=['etag']) - - def insert_or_merge_entity(self, table_name, partition_key, row_key, - entity, content_type='application/atom+xml'): - ''' - Merges an existing entity or inserts a new entity if it does not exist - in the table. Because this operation can insert or update an entity, - it is also known as an "upsert" operation. - - table_name: Table name. - partition_key: PartitionKey of the entity. - row_key: RowKey of the entity. - entity: - Required. The entity object to insert. Could be a dict format or - entity object. - content_type: Required. Must be set to application/atom+xml - ''' - _validate_not_none('table_name', table_name) - _validate_not_none('partition_key', partition_key) - _validate_not_none('row_key', row_key) - _validate_not_none('entity', entity) - _validate_not_none('content_type', content_type) - request = HTTPRequest() - request.method = 'MERGE' - request.host = self._get_host() - request.path = '/' + \ - _str(table_name) + '(PartitionKey=\'' + \ - _str(partition_key) + '\',RowKey=\'' + _str(row_key) + '\')' - request.headers = [('Content-Type', _str_or_none(content_type))] - request.body = _get_request_body(_convert_entity_to_xml(entity)) - request.path, request.query = _update_request_uri_query_local_storage( - request, self.use_local_storage) - request.headers = _update_storage_table_header(request) - response = self._perform_request(request) - - return _parse_response_for_dict_filter(response, filter=['etag']) - - def _perform_request_worker(self, request): - auth = _sign_storage_table_request(request, - self.account_name, - self.account_key) - request.headers.append(('Authorization', auth)) - return self._httpclient.perform_request(request) +#------------------------------------------------------------------------- +# Copyright (c) Microsoft. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#-------------------------------------------------------------------------- +from azure import ( + WindowsAzureError, + TABLE_SERVICE_HOST_BASE, + DEV_TABLE_HOST, + _convert_class_to_xml, + _convert_response_to_feeds, + _dont_fail_not_exist, + _dont_fail_on_exist, + _get_request_body, + _int_or_none, + _parse_response, + _parse_response_for_dict, + _parse_response_for_dict_filter, + _str, + _str_or_none, + _update_request_uri_query_local_storage, + _validate_not_none, + ) +from azure.http import HTTPRequest +from azure.http.batchclient import _BatchClient +from azure.storage import ( + StorageServiceProperties, + _convert_entity_to_xml, + _convert_response_to_entity, + _convert_table_to_xml, + _convert_xml_to_entity, + _convert_xml_to_table, + _sign_storage_table_request, + _update_storage_table_header, + ) +from azure.storage.storageclient import _StorageClient + + +class TableService(_StorageClient): + + ''' + This is the main class managing Table resources. + ''' + + def __init__(self, account_name=None, account_key=None, protocol='https', + host_base=TABLE_SERVICE_HOST_BASE, dev_host=DEV_TABLE_HOST): + ''' + account_name: your storage account name, required for all operations. + account_key: your storage account key, required for all operations. + protocol: Optional. Protocol. Defaults to http. + host_base: + Optional. Live host base url. Defaults to Azure url. Override this + for on-premise. + dev_host: Optional. Dev host url. Defaults to localhost. + ''' + super(TableService, self).__init__( + account_name, account_key, protocol, host_base, dev_host) + + def begin_batch(self): + if self._batchclient is None: + self._batchclient = _BatchClient( + service_instance=self, + account_key=self.account_key, + account_name=self.account_name) + return self._batchclient.begin_batch() + + def commit_batch(self): + try: + ret = self._batchclient.commit_batch() + finally: + self._batchclient = None + return ret + + def cancel_batch(self): + self._batchclient = None + + def get_table_service_properties(self): + ''' + Gets the properties of a storage account's Table service, including + Windows Azure Storage Analytics. + ''' + request = HTTPRequest() + request.method = 'GET' + request.host = self._get_host() + request.path = '/?restype=service&comp=properties' + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_table_header(request) + response = self._perform_request(request) + + return _parse_response(response, StorageServiceProperties) + + def set_table_service_properties(self, storage_service_properties): + ''' + Sets the properties of a storage account's Table Service, including + Windows Azure Storage Analytics. + + storage_service_properties: StorageServiceProperties object. + ''' + _validate_not_none('storage_service_properties', + storage_service_properties) + request = HTTPRequest() + request.method = 'PUT' + request.host = self._get_host() + request.path = '/?restype=service&comp=properties' + request.body = _get_request_body( + _convert_class_to_xml(storage_service_properties)) + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_table_header(request) + response = self._perform_request(request) + + return _parse_response_for_dict(response) + + def query_tables(self, table_name=None, top=None, next_table_name=None): + ''' + Returns a list of tables under the specified account. + + table_name: Optional. 
The specific table to query. + top: Optional. Maximum number of tables to return. + next_table_name: + Optional. When top is used, the next table name is stored in + result.x_ms_continuation['NextTableName'] + ''' + request = HTTPRequest() + request.method = 'GET' + request.host = self._get_host() + if table_name is not None: + uri_part_table_name = "('" + table_name + "')" + else: + uri_part_table_name = "" + request.path = '/Tables' + uri_part_table_name + '' + request.query = [ + ('$top', _int_or_none(top)), + ('NextTableName', _str_or_none(next_table_name)) + ] + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_table_header(request) + response = self._perform_request(request) + + return _convert_response_to_feeds(response, _convert_xml_to_table) + + def create_table(self, table, fail_on_exist=False): + ''' + Creates a new table in the storage account. + + table: + Name of the table to create. Table name may contain only + alphanumeric characters and cannot begin with a numeric character. + It is case-insensitive and must be from 3 to 63 characters long. + fail_on_exist: Specify whether throw exception when table exists. + ''' + _validate_not_none('table', table) + request = HTTPRequest() + request.method = 'POST' + request.host = self._get_host() + request.path = '/Tables' + request.body = _get_request_body(_convert_table_to_xml(table)) + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_table_header(request) + if not fail_on_exist: + try: + self._perform_request(request) + return True + except WindowsAzureError as ex: + _dont_fail_on_exist(ex) + return False + else: + self._perform_request(request) + return True + + def delete_table(self, table_name, fail_not_exist=False): + ''' + table_name: Name of the table to delete. + fail_not_exist: + Specify whether throw exception when table doesn't exist. + ''' + _validate_not_none('table_name', table_name) + request = HTTPRequest() + request.method = 'DELETE' + request.host = self._get_host() + request.path = '/Tables(\'' + _str(table_name) + '\')' + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_table_header(request) + if not fail_not_exist: + try: + self._perform_request(request) + return True + except WindowsAzureError as ex: + _dont_fail_not_exist(ex) + return False + else: + self._perform_request(request) + return True + + def get_entity(self, table_name, partition_key, row_key, select=''): + ''' + Get an entity in a table; includes the $select options. + + partition_key: PartitionKey of the entity. + row_key: RowKey of the entity. + select: Property names to select. 
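# Illustrative sketch of a point read plus a filtered query with server-side
# paging, following the docstrings above and assuming x_ms_continuation
# behaves as the documented dict. Table, keys and filter are hypothetical.
task = ts.get_entity('tasks', 'p1', '1', select='text,priority')

feed = ts.query_entities('tasks', filter="PartitionKey eq 'p1'", top=100)
while True:
    for entity in feed:
        print(entity.RowKey)
    cont = getattr(feed, 'x_ms_continuation', {}) or {}
    if not cont.get('NextPartitionKey'):
        break
    feed = ts.query_entities('tasks', filter="PartitionKey eq 'p1'", top=100,
                             next_partition_key=cont['NextPartitionKey'],
                             next_row_key=cont.get('NextRowKey'))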
+ ''' + _validate_not_none('table_name', table_name) + _validate_not_none('partition_key', partition_key) + _validate_not_none('row_key', row_key) + _validate_not_none('select', select) + request = HTTPRequest() + request.method = 'GET' + request.host = self._get_host() + request.path = '/' + _str(table_name) + \ + '(PartitionKey=\'' + _str(partition_key) + \ + '\',RowKey=\'' + \ + _str(row_key) + '\')?$select=' + \ + _str(select) + '' + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_table_header(request) + response = self._perform_request(request) + + return _convert_response_to_entity(response) + + def query_entities(self, table_name, filter=None, select=None, top=None, + next_partition_key=None, next_row_key=None): + ''' + Get entities in a table; includes the $filter and $select options. + + table_name: Table to query. + filter: + Optional. Filter as described at + http://msdn.microsoft.com/en-us/library/windowsazure/dd894031.aspx + select: Optional. Property names to select from the entities. + top: Optional. Maximum number of entities to return. + next_partition_key: + Optional. When top is used, the next partition key is stored in + result.x_ms_continuation['NextPartitionKey'] + next_row_key: + Optional. When top is used, the next partition key is stored in + result.x_ms_continuation['NextRowKey'] + ''' + _validate_not_none('table_name', table_name) + request = HTTPRequest() + request.method = 'GET' + request.host = self._get_host() + request.path = '/' + _str(table_name) + '()' + request.query = [ + ('$filter', _str_or_none(filter)), + ('$select', _str_or_none(select)), + ('$top', _int_or_none(top)), + ('NextPartitionKey', _str_or_none(next_partition_key)), + ('NextRowKey', _str_or_none(next_row_key)) + ] + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_table_header(request) + response = self._perform_request(request) + + return _convert_response_to_feeds(response, _convert_xml_to_entity) + + def insert_entity(self, table_name, entity, + content_type='application/atom+xml'): + ''' + Inserts a new entity into a table. + + table_name: Table name. + entity: + Required. The entity object to insert. Could be a dict format or + entity object. + content_type: Required. Must be set to application/atom+xml + ''' + _validate_not_none('table_name', table_name) + _validate_not_none('entity', entity) + _validate_not_none('content_type', content_type) + request = HTTPRequest() + request.method = 'POST' + request.host = self._get_host() + request.path = '/' + _str(table_name) + '' + request.headers = [('Content-Type', _str_or_none(content_type))] + request.body = _get_request_body(_convert_entity_to_xml(entity)) + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_table_header(request) + response = self._perform_request(request) + + return _convert_response_to_entity(response) + + def update_entity(self, table_name, partition_key, row_key, entity, + content_type='application/atom+xml', if_match='*'): + ''' + Updates an existing entity in a table. The Update Entity operation + replaces the entire entity and can be used to remove properties. + + table_name: Table name. + partition_key: PartitionKey of the entity. + row_key: RowKey of the entity. + entity: + Required. The entity object to insert. 
Could be a dict format or + entity object. + content_type: Required. Must be set to application/atom+xml + if_match: + Optional. Specifies the condition for which the merge should be + performed. To force an unconditional merge, set to the wildcard + character (*). + ''' + _validate_not_none('table_name', table_name) + _validate_not_none('partition_key', partition_key) + _validate_not_none('row_key', row_key) + _validate_not_none('entity', entity) + _validate_not_none('content_type', content_type) + request = HTTPRequest() + request.method = 'PUT' + request.host = self._get_host() + request.path = '/' + \ + _str(table_name) + '(PartitionKey=\'' + \ + _str(partition_key) + '\',RowKey=\'' + _str(row_key) + '\')' + request.headers = [ + ('Content-Type', _str_or_none(content_type)), + ('If-Match', _str_or_none(if_match)) + ] + request.body = _get_request_body(_convert_entity_to_xml(entity)) + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_table_header(request) + response = self._perform_request(request) + + return _parse_response_for_dict_filter(response, filter=['etag']) + + def merge_entity(self, table_name, partition_key, row_key, entity, + content_type='application/atom+xml', if_match='*'): + ''' + Updates an existing entity by updating the entity's properties. This + operation does not replace the existing entity as the Update Entity + operation does. + + table_name: Table name. + partition_key: PartitionKey of the entity. + row_key: RowKey of the entity. + entity: + Required. The entity object to insert. Can be a dict format or + entity object. + content_type: Required. Must be set to application/atom+xml + if_match: + Optional. Specifies the condition for which the merge should be + performed. To force an unconditional merge, set to the wildcard + character (*). + ''' + _validate_not_none('table_name', table_name) + _validate_not_none('partition_key', partition_key) + _validate_not_none('row_key', row_key) + _validate_not_none('entity', entity) + _validate_not_none('content_type', content_type) + request = HTTPRequest() + request.method = 'MERGE' + request.host = self._get_host() + request.path = '/' + \ + _str(table_name) + '(PartitionKey=\'' + \ + _str(partition_key) + '\',RowKey=\'' + _str(row_key) + '\')' + request.headers = [ + ('Content-Type', _str_or_none(content_type)), + ('If-Match', _str_or_none(if_match)) + ] + request.body = _get_request_body(_convert_entity_to_xml(entity)) + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_table_header(request) + response = self._perform_request(request) + + return _parse_response_for_dict_filter(response, filter=['etag']) + + def delete_entity(self, table_name, partition_key, row_key, + content_type='application/atom+xml', if_match='*'): + ''' + Deletes an existing entity in a table. + + table_name: Table name. + partition_key: PartitionKey of the entity. + row_key: RowKey of the entity. + content_type: Required. Must be set to application/atom+xml + if_match: + Optional. Specifies the condition for which the delete should be + performed. To force an unconditional delete, set to the wildcard + character (*). 
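# Illustrative sketch of conditional writes via if_match, per the notes
# above: each write returns a dict filtered to ['etag'], so pass that etag
# instead of the '*' wildcard and the operation fails if the entity changed
# in the meantime. Values are hypothetical.
entity = {'PartitionKey': 'p1', 'RowKey': '1', 'text': 'final'}
result = ts.update_entity('tasks', 'p1', '1', entity)   # -> {'etag': ...}
ts.merge_entity('tasks', 'p1', '1', entity,
                if_match=result['etag'])                 # conditional merge
ts.delete_entity('tasks', 'p1', '1', if_match='*')       # unconditional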
+ ''' + _validate_not_none('table_name', table_name) + _validate_not_none('partition_key', partition_key) + _validate_not_none('row_key', row_key) + _validate_not_none('content_type', content_type) + _validate_not_none('if_match', if_match) + request = HTTPRequest() + request.method = 'DELETE' + request.host = self._get_host() + request.path = '/' + \ + _str(table_name) + '(PartitionKey=\'' + \ + _str(partition_key) + '\',RowKey=\'' + _str(row_key) + '\')' + request.headers = [ + ('Content-Type', _str_or_none(content_type)), + ('If-Match', _str_or_none(if_match)) + ] + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_table_header(request) + self._perform_request(request) + + def insert_or_replace_entity(self, table_name, partition_key, row_key, + entity, content_type='application/atom+xml'): + ''' + Replaces an existing entity or inserts a new entity if it does not + exist in the table. Because this operation can insert or update an + entity, it is also known as an "upsert" operation. + + table_name: Table name. + partition_key: PartitionKey of the entity. + row_key: RowKey of the entity. + entity: + Required. The entity object to insert. Could be a dict format or + entity object. + content_type: Required. Must be set to application/atom+xml + ''' + _validate_not_none('table_name', table_name) + _validate_not_none('partition_key', partition_key) + _validate_not_none('row_key', row_key) + _validate_not_none('entity', entity) + _validate_not_none('content_type', content_type) + request = HTTPRequest() + request.method = 'PUT' + request.host = self._get_host() + request.path = '/' + \ + _str(table_name) + '(PartitionKey=\'' + \ + _str(partition_key) + '\',RowKey=\'' + _str(row_key) + '\')' + request.headers = [('Content-Type', _str_or_none(content_type))] + request.body = _get_request_body(_convert_entity_to_xml(entity)) + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_table_header(request) + response = self._perform_request(request) + + return _parse_response_for_dict_filter(response, filter=['etag']) + + def insert_or_merge_entity(self, table_name, partition_key, row_key, + entity, content_type='application/atom+xml'): + ''' + Merges an existing entity or inserts a new entity if it does not exist + in the table. Because this operation can insert or update an entity, + it is also known as an "upsert" operation. + + table_name: Table name. + partition_key: PartitionKey of the entity. + row_key: RowKey of the entity. + entity: + Required. The entity object to insert. Could be a dict format or + entity object. + content_type: Required. 
Must be set to application/atom+xml + ''' + _validate_not_none('table_name', table_name) + _validate_not_none('partition_key', partition_key) + _validate_not_none('row_key', row_key) + _validate_not_none('entity', entity) + _validate_not_none('content_type', content_type) + request = HTTPRequest() + request.method = 'MERGE' + request.host = self._get_host() + request.path = '/' + \ + _str(table_name) + '(PartitionKey=\'' + \ + _str(partition_key) + '\',RowKey=\'' + _str(row_key) + '\')' + request.headers = [('Content-Type', _str_or_none(content_type))] + request.body = _get_request_body(_convert_entity_to_xml(entity)) + request.path, request.query = _update_request_uri_query_local_storage( + request, self.use_local_storage) + request.headers = _update_storage_table_header(request) + response = self._perform_request(request) + + return _parse_response_for_dict_filter(response, filter=['etag']) + + def _perform_request_worker(self, request): + auth = _sign_storage_table_request(request, + self.account_name, + self.account_key) + request.headers.append(('Authorization', auth)) + return self._httpclient.perform_request(request) diff --git a/awx/lib/site-packages/dateutil/__init__.py b/awx/lib/site-packages/dateutil/__init__.py index 1020e72919..f8fde9ba08 100644 --- a/awx/lib/site-packages/dateutil/__init__.py +++ b/awx/lib/site-packages/dateutil/__init__.py @@ -1,10 +1,2 @@ # -*- coding: utf-8 -*- -""" -Copyright (c) 2003-2010 Gustavo Niemeyer <gustavo@niemeyer.net> - -This module offers extensions to the standard Python -datetime module. -""" -__author__ = "Tomi Pieviläinen <tomi.pievilainen@iki.fi>" -__license__ = "Simplified BSD" -__version__ = "2.2" +__version__ = "2.4.0" diff --git a/awx/lib/site-packages/dateutil/easter.py b/awx/lib/site-packages/dateutil/easter.py index d8a38844f9..8d30c4ebda 100644 --- a/awx/lib/site-packages/dateutil/easter.py +++ b/awx/lib/site-packages/dateutil/easter.py @@ -1,18 +1,17 @@ +# -*- coding: utf-8 -*- """ -Copyright (c) 2003-2007 Gustavo Niemeyer <gustavo@niemeyer.net> - -This module offers extensions to the standard Python -datetime module. +This module offers a generic easter computing method for any given year, using +Western, Orthodox or Julian algorithms. """ -__license__ = "Simplified BSD" import datetime __all__ = ["easter", "EASTER_JULIAN", "EASTER_ORTHODOX", "EASTER_WESTERN"] -EASTER_JULIAN = 1 +EASTER_JULIAN = 1 EASTER_ORTHODOX = 2 -EASTER_WESTERN = 3 +EASTER_WESTERN = 3 + def easter(year, method=EASTER_WESTERN): """ @@ -24,7 +23,7 @@ def easter(year, method=EASTER_WESTERN): This algorithm implements three different easter calculation methods: - + 1 - Original calculation in Julian calendar, valid in dates after 326 AD 2 - Original method, with date converted to Gregorian @@ -39,7 +38,7 @@ def easter(year, method=EASTER_WESTERN): EASTER_WESTERN = 3 The default method is method 3. 
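# Illustrative sketch of the three methods enumerated above; the 2015
# Western and Orthodox dates agree with published calendars.
from dateutil.easter import easter, EASTER_JULIAN, EASTER_ORTHODOX

easter(2015)                   # method 3, the default -> datetime.date(2015, 4, 5)
easter(2015, EASTER_ORTHODOX)  # Julian reckoning, Gregorian date -> datetime.date(2015, 4, 12)
easter(2015, EASTER_JULIAN)    # the date expressed in the Julian calendar itself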
- + More about the algorithm may be found at: http://users.chariot.net.au/~gmarts/eastalg.htm @@ -68,24 +67,23 @@ def easter(year, method=EASTER_WESTERN): e = 0 if method < 3: # Old method - i = (19*g+15)%30 - j = (y+y//4+i)%7 + i = (19*g + 15) % 30 + j = (y + y//4 + i) % 7 if method == 2: # Extra dates to convert Julian to Gregorian date e = 10 if y > 1600: - e = e+y//100-16-(y//100-16)//4 + e = e + y//100 - 16 - (y//100 - 16)//4 else: # New method c = y//100 - h = (c-c//4-(8*c+13)//25+19*g+15)%30 - i = h-(h//28)*(1-(h//28)*(29//(h+1))*((21-g)//11)) - j = (y+y//4+i+2-c+c//4)%7 + h = (c - c//4 - (8*c + 13)//25 + 19*g + 15) % 30 + i = h - (h//28)*(1 - (h//28)*(29//(h + 1))*((21 - g)//11)) + j = (y + y//4 + i + 2 - c + c//4) % 7 # p can be from -6 to 56 corresponding to dates 22 March to 23 May # (later dates apply to method 2, although 23 May never actually occurs) - p = i-j+e - d = 1+(p+27+(p+6)//40)%31 - m = 3+(p+26)//30 + p = i - j + e + d = 1 + (p + 27 + (p + 6)//40) % 31 + m = 3 + (p + 26)//30 return datetime.date(int(y), int(m), int(d)) - diff --git a/awx/lib/site-packages/dateutil/parser.py b/awx/lib/site-packages/dateutil/parser.py index aef836238c..8b6c2d28b8 100644 --- a/awx/lib/site-packages/dateutil/parser.py +++ b/awx/lib/site-packages/dateutil/parser.py @@ -1,32 +1,21 @@ # -*- coding:iso-8859-1 -*- """ -Copyright (c) 2003-2007 Gustavo Niemeyer <gustavo@niemeyer.net> - -This module offers extensions to the standard Python -datetime module. +This module offers a generic date/time string parser which is able to parse +most known formats to represent a date and/or time. """ from __future__ import unicode_literals -__license__ = "Simplified BSD" - import datetime import string import time -import sys -import os import collections - -try: - from io import StringIO -except ImportError: - from io import StringIO +from io import StringIO from six import text_type, binary_type, integer_types from . import relativedelta from . import tz - __all__ = ["parse", "parserinfo"] @@ -83,9 +72,9 @@ class _timelex(object): state = '0' elif nextchar in whitespace: token = ' ' - break # emit token + break # emit token else: - break # emit token + break # emit token elif state == 'a': seenletters = True if nextchar in wordchars: @@ -95,7 +84,7 @@ class _timelex(object): state = 'a.' else: self.charstack.append(nextchar) - break # emit token + break # emit token elif state == '0': if nextchar in numchars: token += nextchar @@ -104,7 +93,7 @@ class _timelex(object): state = '0.' else: self.charstack.append(nextchar) - break # emit token + break # emit token elif state == 'a.': seenletters = True if nextchar == '.' or nextchar in wordchars: @@ -114,7 +103,7 @@ class _timelex(object): state = '0.' else: self.charstack.append(nextchar) - break # emit token + break # emit token elif state == '0.': if nextchar == '.' or nextchar in numchars: token += nextchar @@ -123,9 +112,9 @@ class _timelex(object): state = 'a.' 
else: self.charstack.append(nextchar) - break # emit token - if (state in ('a.', '0.') and - (seenletters or token.count('.') > 1 or token[-1] == '.')): + break # emit token + if (state in ('a.', '0.') and (seenletters or token.count('.') > 1 or + token[-1] == '.')): l = token.split('.') token = l[0] for tok in l[1:]: @@ -183,18 +172,18 @@ class parserinfo(object): ("Fri", "Friday"), ("Sat", "Saturday"), ("Sun", "Sunday")] - MONTHS = [("Jan", "January"), - ("Feb", "February"), - ("Mar", "March"), - ("Apr", "April"), - ("May", "May"), - ("Jun", "June"), - ("Jul", "July"), - ("Aug", "August"), - ("Sep", "Sept", "September"), - ("Oct", "October"), - ("Nov", "November"), - ("Dec", "December")] + MONTHS = [("Jan", "January"), + ("Feb", "February"), + ("Mar", "March"), + ("Apr", "April"), + ("May", "May"), + ("Jun", "June"), + ("Jul", "July"), + ("Aug", "August"), + ("Sep", "Sept", "September"), + ("Oct", "October"), + ("Nov", "November"), + ("Dec", "December")] HMS = [("h", "hour", "hours"), ("m", "minute", "minutes"), ("s", "second", "seconds")] @@ -299,15 +288,16 @@ class parser(object): def __init__(self, info=None): self.info = info or parserinfo() - def parse(self, timestr, default=None, - ignoretz=False, tzinfos=None, - **kwargs): + def parse(self, timestr, default=None, ignoretz=False, tzinfos=None, + **kwargs): if not default: default = datetime.datetime.now().replace(hour=0, minute=0, second=0, microsecond=0) - - res, skipped_tokens = self._parse(timestr, **kwargs) + if kwargs.get('fuzzy_with_tokens', False): + res, skipped_tokens = self._parse(timestr, **kwargs) + else: + res = self._parse(timestr, **kwargs) if res is None: raise ValueError("unknown string format") @@ -321,7 +311,8 @@ class parser(object): if res.weekday is not None and not res.day: ret = ret+relativedelta.relativedelta(weekday=res.weekday) if not ignoretz: - if isinstance(tzinfos, collections.Callable) or tzinfos and res.tzname in tzinfos: + if (isinstance(tzinfos, collections.Callable) or + tzinfos and res.tzname in tzinfos): if isinstance(tzinfos, collections.Callable): tzdata = tzinfos(res.tzname, res.tzoffset) else: @@ -333,8 +324,8 @@ class parser(object): elif isinstance(tzdata, integer_types): tzinfo = tz.tzoffset(res.tzname, tzdata) else: - raise ValueError("offset must be tzinfo subclass, " \ - "tz string, or int offset") + raise ValueError("offset must be tzinfo subclass, " + "tz string, or int offset") ret = ret.replace(tzinfo=tzinfo) elif res.tzname and res.tzname in time.tzname: ret = ret.replace(tzinfo=tz.tzlocal()) @@ -343,17 +334,18 @@ class parser(object): elif res.tzoffset: ret = ret.replace(tzinfo=tz.tzoffset(res.tzname, res.tzoffset)) - if skipped_tokens: + if kwargs.get('fuzzy_with_tokens', False): return ret, skipped_tokens - - return ret + else: + return ret class _result(_resultbase): __slots__ = ["year", "month", "day", "weekday", "hour", "minute", "second", "microsecond", "tzname", "tzoffset"] - def _parse(self, timestr, dayfirst=None, yearfirst=None, fuzzy=False, fuzzy_with_tokens=False): + def _parse(self, timestr, dayfirst=None, yearfirst=None, fuzzy=False, + fuzzy_with_tokens=False): if fuzzy_with_tokens: fuzzy = True @@ -365,7 +357,6 @@ class parser(object): res = self._result() l = _timelex.split(timestr) - # keep up with the last token skipped so we can recombine # consecutively skipped tokens (-2 for when i begins at 0). 
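# Illustrative sketch of the return-shape change visible in parse() above:
# _parse() now returns a bare result unless fuzzy_with_tokens is requested,
# in which case a (datetime, skipped_tokens) pair comes back.
from dateutil import parser

dt = parser.parse("2015-01-09 10:12:00 UTC")            # -> datetime
dt, skipped = parser.parse("Today is January 1, 2047 at 8:21:00AM",
                           fuzzy_with_tokens=True)      # -> (datetime, tuple)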
last_skipped_token_i = -2 @@ -440,12 +431,12 @@ class parser(object): while True: if idx == 0: res.hour = int(value) - if value%1: - res.minute = int(60*(value%1)) + if value % 1: + res.minute = int(60*(value % 1)) elif idx == 1: res.minute = int(value) - if value%1: - res.second = int(60*(value%1)) + if value % 1: + res.second = int(60*(value % 1)) elif idx == 2: res.second, res.microsecond = \ _parsems(value_repr) @@ -465,16 +456,17 @@ class parser(object): newidx = info.hms(l[i]) if newidx is not None: idx = newidx - elif i == len_l and l[i-2] == ' ' and info.hms(l[i-3]) is not None: + elif (i == len_l and l[i-2] == ' ' and + info.hms(l[i-3]) is not None): # X h MM or X m SS idx = info.hms(l[i-3]) + 1 if idx == 1: res.minute = int(value) - if value%1: - res.second = int(60*(value%1)) + if value % 1: + res.second = int(60*(value % 1)) elif idx == 2: res.second, res.microsecond = \ - _parsems(value_repr) + _parsems(value_repr) i += 1 elif i+1 < len_l and l[i] == ':': # HH:MM[:SS[.ss]] @@ -482,8 +474,8 @@ class parser(object): i += 1 value = float(l[i]) res.minute = int(value) - if value%1: - res.second = int(60*(value%1)) + if value % 1: + res.second = int(60*(value % 1)) i += 1 if i < len_l and l[i] == ':': res.second, res.microsecond = _parsems(l[i+1]) @@ -597,8 +589,9 @@ class parser(object): # Check for a timezone name if (res.hour is not None and len(l[i]) <= 5 and - res.tzname is None and res.tzoffset is None and - not [x for x in l[i] if x not in string.ascii_uppercase]): + res.tzname is None and res.tzoffset is None and + not [x for x in l[i] if x not in + string.ascii_uppercase]): res.tzname = l[i] res.tzoffset = info.tzoffset(res.tzname) i += 1 @@ -643,7 +636,7 @@ class parser(object): info.jump(l[i]) and l[i+1] == '(' and l[i+3] == ')' and 3 <= len(l[i+2]) <= 5 and not [x for x in l[i+2] - if x not in string.ascii_uppercase]): + if x not in string.ascii_uppercase]): # -0300 (BRST) res.tzname = l[i+2] i += 4 @@ -732,10 +725,12 @@ class parser(object): if fuzzy_with_tokens: return res, tuple(skipped_tokens) - - return res, None + else: + return res DEFAULTPARSER = parser() + + def parse(timestr, parserinfo=None, **kwargs): # Python 2.x support: datetimes return their string presentation as # bytes in 2.x and unicode in 3.x, so it's reasonable to expect that @@ -779,7 +774,7 @@ class _tzparser(object): # BRST+3[BRDT[+2]] j = i while j < len_l and not [x for x in l[j] - if x in "0123456789:,-+"]: + if x in "0123456789:,-+"]: j += 1 if j != i: if not res.stdabbr: @@ -789,8 +784,8 @@ class _tzparser(object): offattr = "dstoffset" res.dstabbr = "".join(l[i:j]) i = j - if (i < len_l and - (l[i] in ('+', '-') or l[i][0] in "0123456789")): + if (i < len_l and (l[i] in ('+', '-') or l[i][0] in + "0123456789")): if l[i] in ('+', '-'): # Yes, that's right. See the TZ variable # documentation. 
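# Illustrative sketch: the sign inversion handled above follows the POSIX TZ
# convention, where positive offsets lie WEST of Greenwich. tz.tzstr is the
# public wrapper over this parser; the string below is an assumed example in
# the "BRST+3[BRDT[+2]]" shape noted in the comments.
from dateutil import tz

brst = tz.tzstr('BRST+3BRDT+2,M10.1.0,M2.3.0')  # UTC-3 standard, UTC-2 DST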
@@ -801,8 +796,8 @@ class _tzparser(object): len_li = len(l[i]) if len_li == 4: # -0300 - setattr(res, offattr, - (int(l[i][:2])*3600+int(l[i][2:])*60)*signal) + setattr(res, offattr, (int(l[i][:2])*3600 + + int(l[i][2:])*60)*signal) elif i+1 < len_l and l[i+1] == ':': # -03:00 setattr(res, offattr, @@ -822,7 +817,8 @@ class _tzparser(object): if i < len_l: for j in range(i, len_l): - if l[j] == ';': l[j] = ',' + if l[j] == ';': + l[j] = ',' assert l[i] == ',' @@ -831,7 +827,7 @@ class _tzparser(object): if i >= len_l: pass elif (8 <= l.count(',') <= 9 and - not [y for x in l[i:] if x != ',' + not [y for x in l[i:] if x != ',' for y in x if y not in "0123456789"]): # GMT0BST,3,0,30,3600,10,0,26,7200[,3600] for x in (res.start, res.end): @@ -845,7 +841,7 @@ class _tzparser(object): i += 2 if value: x.week = value - x.weekday = (int(l[i])-1)%7 + x.weekday = (int(l[i])-1) % 7 else: x.day = int(l[i]) i += 2 @@ -861,7 +857,7 @@ class _tzparser(object): elif (l.count(',') == 2 and l[i:].count('/') <= 2 and not [y for x in l[i:] if x not in (',', '/', 'J', 'M', '.', '-', ':') - for y in x if y not in "0123456789"]): + for y in x if y not in "0123456789"]): for x in (res.start, res.end): if l[i] == 'J': # non-leap year day (1 based) @@ -880,7 +876,7 @@ class _tzparser(object): i += 1 assert l[i] in ('-', '.') i += 1 - x.weekday = (int(l[i])-1)%7 + x.weekday = (int(l[i])-1) % 7 else: # year day (zero based) x.yday = int(l[i])+1 @@ -921,6 +917,8 @@ class _tzparser(object): DEFAULTTZPARSER = _tzparser() + + def _parsetz(tzstr): return DEFAULTTZPARSER.parse(tzstr) diff --git a/awx/lib/site-packages/dateutil/relativedelta.py b/awx/lib/site-packages/dateutil/relativedelta.py index 4393bcbcde..2b5557a96f 100644 --- a/awx/lib/site-packages/dateutil/relativedelta.py +++ b/awx/lib/site-packages/dateutil/relativedelta.py @@ -1,11 +1,4 @@ -""" -Copyright (c) 2003-2010 Gustavo Niemeyer <gustavo@niemeyer.net> - -This module offers extensions to the standard Python -datetime module. -""" -__license__ = "Simplified BSD" - +# -*- coding: utf-8 -*- import datetime import calendar @@ -13,6 +6,7 @@ from six import integer_types __all__ = ["relativedelta", "MO", "TU", "WE", "TH", "FR", "SA", "SU"] + class weekday(object): __slots__ = ["weekday", "n"] @@ -43,25 +37,35 @@ class weekday(object): MO, TU, WE, TH, FR, SA, SU = weekdays = tuple([weekday(x) for x in range(7)]) + class relativedelta(object): """ -The relativedelta type is based on the specification of the excelent -work done by M.-A. Lemburg in his mx.DateTime extension. However, -notice that this type does *NOT* implement the same algorithm as +The relativedelta type is based on the specification of the excellent +work done by M.-A. Lemburg in his +`mx.DateTime <http://www.egenix.com/files/python/mxDateTime.html>`_ extension. +However, notice that this type does *NOT* implement the same algorithm as his work. Do *NOT* expect it to behave like mx.DateTime's counterpart. -There's two different ways to build a relativedelta instance. The -first one is passing it two date/datetime classes: +There are two different ways to build a relativedelta instance. The +first one is passing it two date/datetime classes:: relativedelta(datetime1, datetime2) -And the other way is to use the following keyword arguments: +The second one is passing it any number of the following keyword arguments:: + + relativedelta(arg1=x,arg2=y,arg3=z...) year, month, day, hour, minute, second, microsecond: - Absolute information. 
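# Illustrative sketch of the singular/plural rule the rewritten docstring
# below spells out: singular arguments REPLACE a datetime field, plural
# arguments perform arithmetic on it. Results worked out by hand.
from datetime import datetime
from dateutil.relativedelta import relativedelta, FR

dt = datetime(2012, 3, 4)
dt + relativedelta(year=2000)          # -> 2000-03-04 (absolute: replaces)
dt + relativedelta(years=+2)           # -> 2014-03-04 (relative: adds)
dt + relativedelta(months=+1, day=31)  # -> 2012-04-30 (day clipped to fit April)
dt + relativedelta(weekday=FR(+1))     # -> 2012-03-09 (the following Friday)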
+ Absolute information (argument is singular); adding or subtracting a + relativedelta with absolute information does not perform an arithmetic + operation, but rather REPLACES the corresponding value in the + original datetime with the value(s) in relativedelta. years, months, weeks, days, hours, minutes, seconds, microseconds: - Relative information, may be negative. + Relative information, may be negative (argument is plural); adding + or subtracting a relativedelta with relative information performs + the corresponding arithmetic operation on the original datetime value + with the information in the relativedelta. weekday: One of the weekday instances (MO, TU, etc). These instances may @@ -80,26 +84,26 @@ And the other way is to use the following keyword arguments: Here is the behavior of operations with relativedelta: -1) Calculate the absolute year, using the 'year' argument, or the +1. Calculate the absolute year, using the 'year' argument, or the original datetime year, if the argument is not present. -2) Add the relative 'years' argument to the absolute year. +2. Add the relative 'years' argument to the absolute year. -3) Do steps 1 and 2 for month/months. +3. Do steps 1 and 2 for month/months. -4) Calculate the absolute day, using the 'day' argument, or the +4. Calculate the absolute day, using the 'day' argument, or the original datetime day, if the argument is not present. Then, subtract from the day until it fits in the year and month found after their operations. -5) Add the relative 'days' argument to the absolute day. Notice +5. Add the relative 'days' argument to the absolute day. Notice that the 'weeks' argument is multiplied by 7 and added to 'days'. -6) Do steps 1 and 2 for hour/hours, minute/minutes, second/seconds, +6. Do steps 1 and 2 for hour/hours, minute/minutes, second/seconds, microsecond/microseconds. -7) If the 'weekday' argument is present, calculate the weekday, +7. If the 'weekday' argument is present, calculate the weekday, with the given (wday, nth) tuple. wday is the index of the weekday (0-6, 0=Mon), and nth is the number of weeks to add forward or backward, depending on its signal. Notice that if @@ -114,9 +118,14 @@ Here is the behavior of operations with relativedelta: yearday=None, nlyearday=None, hour=None, minute=None, second=None, microsecond=None): if dt1 and dt2: - if (not isinstance(dt1, datetime.date)) or (not isinstance(dt2, datetime.date)): + # datetime is a subclass of date.
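The singular/plural distinction spelled out in the docstring above is easiest to see side by side; a minimal sketch (the dates are illustrative):

    from datetime import datetime
    from dateutil.relativedelta import relativedelta

    dt = datetime(2015, 1, 31, 12, 0)

    # Plural argument: relative arithmetic, with the day clipped to fit
    # February (steps 4 and 5 of the list above).
    print(dt + relativedelta(months=+1))   # 2015-02-28 12:00:00

    # Singular argument: absolute replacement of the month, not arithmetic;
    # day 31 is then clipped to June's 30 days.
    print(dt + relativedelta(month=6))     # 2015-06-30 12:00:00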
So both must be date + if not (isinstance(dt1, datetime.date) and + isinstance(dt2, datetime.date)): raise TypeError("relativedelta only diffs datetime/date") - if not type(dt1) == type(dt2): #isinstance(dt1, type(dt2)): + # We allow two dates, or two datetimes, so we coerce them to be + # of the same type + if (isinstance(dt1, datetime.datetime) != + isinstance(dt2, datetime.datetime)): if not isinstance(dt1, datetime.datetime): dt1 = datetime.datetime.fromordinal(dt1.toordinal()) elif not isinstance(dt2, datetime.datetime): @@ -185,7 +194,8 @@ Here is the behavior of operations with relativedelta: if yearday > 59: self.leapdays = -1 if yday: - ydayidx = [31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 366] + ydayidx = [31, 59, 90, 120, 151, 181, 212, + 243, 273, 304, 334, 366] for idx, ydays in enumerate(ydayidx): if yday <= ydays: self.month = idx+1 @@ -225,9 +235,9 @@ Here is the behavior of operations with relativedelta: div, mod = divmod(self.months*s, 12) self.months = mod*s self.years += div*s - if (self.hours or self.minutes or self.seconds or self.microseconds or - self.hour is not None or self.minute is not None or - self.second is not None or self.microsecond is not None): + if (self.hours or self.minutes or self.seconds or self.microseconds + or self.hour is not None or self.minute is not None or + self.second is not None or self.microsecond is not None): self._has_time = 1 else: self._has_time = 0 @@ -245,21 +255,23 @@ Here is the behavior of operations with relativedelta: def __add__(self, other): if isinstance(other, relativedelta): return relativedelta(years=other.years+self.years, - months=other.months+self.months, - days=other.days+self.days, - hours=other.hours+self.hours, - minutes=other.minutes+self.minutes, - seconds=other.seconds+self.seconds, - microseconds=other.microseconds+self.microseconds, - leapdays=other.leapdays or self.leapdays, - year=other.year or self.year, - month=other.month or self.month, - day=other.day or self.day, - weekday=other.weekday or self.weekday, - hour=other.hour or self.hour, - minute=other.minute or self.minute, - second=other.second or self.second, - microsecond=other.microsecond or self.microsecond) + months=other.months+self.months, + days=other.days+self.days, + hours=other.hours+self.hours, + minutes=other.minutes+self.minutes, + seconds=other.seconds+self.seconds, + microseconds=(other.microseconds + + self.microseconds), + leapdays=other.leapdays or self.leapdays, + year=other.year or self.year, + month=other.month or self.month, + day=other.day or self.day, + weekday=other.weekday or self.weekday, + hour=other.hour or self.hour, + minute=other.minute or self.minute, + second=other.second or self.second, + microsecond=(other.microsecond or + self.microsecond)) if not isinstance(other, datetime.date): raise TypeError("unsupported type for add operation") elif self._has_time and not isinstance(other, datetime.datetime): @@ -295,9 +307,9 @@ Here is the behavior of operations with relativedelta: weekday, nth = self.weekday.weekday, self.weekday.n or 1 jumpdays = (abs(nth)-1)*7 if nth > 0: - jumpdays += (7-ret.weekday()+weekday)%7 + jumpdays += (7-ret.weekday()+weekday) % 7 else: - jumpdays += (ret.weekday()-weekday)%7 + jumpdays += (ret.weekday()-weekday) % 7 jumpdays *= -1 ret += datetime.timedelta(days=jumpdays) return ret diff --git a/awx/lib/site-packages/dateutil/rrule.py b/awx/lib/site-packages/dateutil/rrule.py index ad4d3ba70c..401bc8ab03 100644 --- a/awx/lib/site-packages/dateutil/rrule.py +++ 
b/awx/lib/site-packages/dateutil/rrule.py @@ -1,21 +1,19 @@ +# -*- coding: utf-8 -*- """ -Copyright (c) 2003-2010 Gustavo Niemeyer <gustavo@niemeyer.net> - -This module offers extensions to the standard Python -datetime module. +The rrule module offers a small, complete, and very fast implementation of +the recurrence rules documented in the +`iCalendar RFC <http://www.ietf.org/rfc/rfc2445.txt>`_, +including support for caching of results. """ -__license__ = "Simplified BSD" - import itertools import datetime import calendar -try: - import _thread -except ImportError: - import thread as _thread import sys +from fractions import gcd + from six import advance_iterator, integer_types +from six.moves import _thread __all__ = ["rrule", "rruleset", "rrulestr", "YEARLY", "MONTHLY", "WEEKLY", "DAILY", @@ -23,7 +21,7 @@ __all__ = ["rrule", "rruleset", "rrulestr", "MO", "TU", "WE", "TH", "FR", "SA", "SU"] # Every mask is 7 days longer to handle cross-year weekly periods. -M366MASK = tuple([1]*31+[2]*29+[3]*31+[4]*30+[5]*31+[6]*30+ +M366MASK = tuple([1]*31+[2]*29+[3]*31+[4]*30+[5]*31+[6]*30 + [7]*31+[8]*31+[9]*30+[10]*31+[11]*30+[12]*31+[1]*7) M365MASK = list(M366MASK) M29, M30, M31 = list(range(1, 30)), list(range(1, 31)), list(range(1, 32)) @@ -51,6 +49,7 @@ M365MASK = tuple(M365MASK) easter = None parser = None + class weekday(object): __slots__ = ["weekday", "n"] @@ -83,12 +82,13 @@ class weekday(object): MO, TU, WE, TH, FR, SA, SU = weekdays = tuple([weekday(x) for x in range(7)]) + class rrulebase(object): def __init__(self, cache=False): if cache: self._cache = [] self._cache_lock = _thread.allocate_lock() - self._cache_gen = self._iter() + self._cache_gen = self._iter() self._cache_complete = False else: self._cache = None @@ -163,11 +163,17 @@ class rrulebase(object): # __len__() introduces a large performance penalty. def count(self): + """ Returns the number of recurrences in this set. It will have to go + through the whole recurrence, if this hasn't been done before. """ if self._len is None: - for x in self: pass + for x in self: + pass return self._len def before(self, dt, inc=False): + """ Returns the last recurrence before the given datetime instance. The + inc keyword defines what happens if dt is an occurrence. With + inc=True, if dt itself is an occurrence, it will be returned. """ if self._cache_complete: gen = self._cache else: @@ -186,6 +192,9 @@ class rrulebase(object): return last def after(self, dt, inc=False): + """ Returns the first recurrence after the given datetime instance. The + inc keyword defines what happens if dt is an occurrence. With + inc=True, if dt itself is an occurrence, it will be returned. """ if self._cache_complete: gen = self._cache else: @@ -201,6 +210,10 @@ class rrulebase(object): return None def between(self, after, before, inc=False): + """ Returns all the occurrences of the rrule between after and before. + The inc keyword defines what happens if after and/or before are + themselves occurrences. With inc=True, they will be included in the + list, if they are found in the recurrence set. """ if self._cache_complete: gen = self._cache else: @@ -229,7 +242,93 @@ class rrulebase(object): l.append(i) return l + class rrule(rrulebase): + """ + That's the base of the rrule operation. It accepts all the keywords + defined in the RFC as its constructor parameters (except byday, + which was renamed to byweekday) and more.
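The before()/after()/between() docstrings added above translate to the following usage (a sketch against the public rrule API; dates illustrative):

    from datetime import datetime
    from dateutil.rrule import rrule, DAILY

    rule = rrule(DAILY, dtstart=datetime(2015, 1, 1), count=31, cache=True)

    print(rule.after(datetime(2015, 1, 10)))             # 2015-01-11 00:00:00
    print(rule.before(datetime(2015, 1, 10), inc=True))  # 2015-01-10 00:00:00
    # Endpoints are excluded unless inc=True:
    print(rule.between(datetime(2015, 1, 5), datetime(2015, 1, 8)))
    # [datetime(2015, 1, 6, 0, 0), datetime(2015, 1, 7, 0, 0)]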
The constructor prototype is:: + + rrule(freq) + + Where freq must be one of YEARLY, MONTHLY, WEEKLY, DAILY, HOURLY, MINUTELY, + or SECONDLY. + + Additionally, it supports the following keyword arguments: + + :param cache: + If given, it must be a boolean value specifying to enable or disable + caching of results. If you will use the same rrule instance multiple + times, enabling caching will improve the performance considerably. + :param dtstart: + The recurrence start. Besides being the base for the recurrence, + missing parameters in the final recurrence instances will also be + extracted from this date. If not given, datetime.now() will be used + instead. + :param interval: + The interval between each freq iteration. For example, when using + YEARLY, an interval of 2 means once every two years, but with HOURLY, + it means once every two hours. The default interval is 1. + :param wkst: + The week start day. Must be one of the MO, TU, WE constants, or an + integer, specifying the first day of the week. This will affect + recurrences based on weekly periods. The default week start is got + from calendar.firstweekday(), and may be modified by + calendar.setfirstweekday(). + :param count: + How many occurrences will be generated. + :param until: + If given, this must be a datetime instance, that will specify the + limit of the recurrence. If a recurrence instance happens to be the + same as the datetime instance given in the until keyword, this will + be the last occurrence. + :param bysetpos: + If given, it must be either an integer, or a sequence of integers, + positive or negative. Each given integer will specify an occurrence + number, corresponding to the nth occurrence of the rule inside the + frequency period. For example, a bysetpos of -1 if combined with a + MONTHLY frequency, and a byweekday of (MO, TU, WE, TH, FR), will + result in the last work day of every month. + :param bymonth: + If given, it must be either an integer, or a sequence of integers, + meaning the months to apply the recurrence to. + :param bymonthday: + If given, it must be either an integer, or a sequence of integers, + meaning the month days to apply the recurrence to. + :param byyearday: + If given, it must be either an integer, or a sequence of integers, + meaning the year days to apply the recurrence to. + :param byweekno: + If given, it must be either an integer, or a sequence of integers, + meaning the week numbers to apply the recurrence to. Week numbers + have the meaning described in ISO8601, that is, the first week of + the year is that containing at least four days of the new year. + :param byweekday: + If given, it must be either an integer (0 == MO), a sequence of + integers, one of the weekday constants (MO, TU, etc), or a sequence + of these constants. When given, these variables will define the + weekdays where the recurrence will be applied. It's also possible to + use an argument n for the weekday instances, which will mean the nth + occurrence of this weekday in the period. For example, with MONTHLY, + or with YEARLY and BYMONTH, using FR(+1) in byweekday will specify the + first friday of the month where the recurrence happens. Notice that in + the RFC documentation, this is specified as BYDAY, but was renamed to + avoid the ambiguity of that keyword. + :param byhour: + If given, it must be either an integer, or a sequence of integers, + meaning the hours to apply the recurrence to. 
+ :param byminute: + If given, it must be either an integer, or a sequence of integers, + meaning the minutes to apply the recurrence to. + :param bysecond: + If given, it must be either an integer, or a sequence of integers, + meaning the seconds to apply the recurrence to. + :param byeaster: + If given, it must be either an integer, or a sequence of integers, + positive or negative. Each integer will define an offset from the + Easter Sunday. Passing the offset 0 to byeaster will yield the Easter + Sunday itself. This is an extension to the RFC specification. + """ def __init__(self, freq, dtstart=None, interval=1, wkst=None, count=None, until=None, bysetpos=None, bymonth=None, bymonthday=None, byyearday=None, byeaster=None, @@ -249,15 +348,18 @@ class rrule(rrulebase): self._freq = freq self._interval = interval self._count = count + if until and not isinstance(until, datetime.datetime): until = datetime.datetime.fromordinal(until.toordinal()) self._until = until + if wkst is None: self._wkst = calendar.firstweekday() elif isinstance(wkst, integer_types): self._wkst = wkst else: self._wkst = wkst.weekday + if bysetpos is None: self._bysetpos = None elif isinstance(bysetpos, integer_types): @@ -271,30 +373,36 @@ class rrule(rrulebase): if pos == 0 or not (-366 <= pos <= 366): raise ValueError("bysetpos must be between 1 and 366, " "or between -366 and -1") - if not (byweekno or byyearday or bymonthday or - byweekday is not None or byeaster is not None): + + if (byweekno is None and byyearday is None and bymonthday is None and + byweekday is None and byeaster is None): if freq == YEARLY: - if not bymonth: + if bymonth is None: bymonth = dtstart.month bymonthday = dtstart.day elif freq == MONTHLY: bymonthday = dtstart.day elif freq == WEEKLY: byweekday = dtstart.weekday() + # bymonth - if not bymonth: + if bymonth is None: self._bymonth = None - elif isinstance(bymonth, integer_types): - self._bymonth = (bymonth,) else: - self._bymonth = tuple(bymonth) + if isinstance(bymonth, integer_types): + bymonth = (bymonth,) + + self._bymonth = set(bymonth) + # byyearday - if not byyearday: + if byyearday is None: self._byyearday = None - elif isinstance(byyearday, integer_types): - self._byyearday = (byyearday,) else: - self._byyearday = tuple(byyearday) + if isinstance(byyearday, integer_types): + byyearday = (byyearday,) + + self._byyearday = set(byyearday) + # byeaster if byeaster is not None: if not easter: @@ -305,87 +413,104 @@ class rrule(rrulebase): self._byeaster = tuple(byeaster) else: self._byeaster = None + # bymonthay - if not bymonthday: + if bymonthday is None: self._bymonthday = () self._bynmonthday = () - elif isinstance(bymonthday, integer_types): - if bymonthday < 0: - self._bynmonthday = (bymonthday,) - self._bymonthday = () - else: - self._bymonthday = (bymonthday,) - self._bynmonthday = () else: - self._bymonthday = tuple([x for x in bymonthday if x > 0]) - self._bynmonthday = tuple([x for x in bymonthday if x < 0]) + if isinstance(bymonthday, integer_types): + bymonthday = (bymonthday,) + + self._bymonthday = set([x for x in bymonthday if x > 0]) + self._bynmonthday = set([x for x in bymonthday if x < 0]) + # byweekno if byweekno is None: self._byweekno = None - elif isinstance(byweekno, integer_types): - self._byweekno = (byweekno,) else: - self._byweekno = tuple(byweekno) + if isinstance(byweekno, integer_types): + byweekno = (byweekno,) + + self._byweekno = set(byweekno) + # byweekday / bynweekday if byweekday is None: self._byweekday = None self._bynweekday = None - elif 
isinstance(byweekday, integer_types): - self._byweekday = (byweekday,) - self._bynweekday = None - elif hasattr(byweekday, "n"): - if not byweekday.n or freq > MONTHLY: - self._byweekday = (byweekday.weekday,) - self._bynweekday = None - else: - self._bynweekday = ((byweekday.weekday, byweekday.n),) - self._byweekday = None else: - self._byweekday = [] - self._bynweekday = [] + if isinstance(byweekday, integer_types): + byweekday = (byweekday,) + elif hasattr(byweekday, "n"): + byweekday = (byweekday.weekday,) + + self._byweekday = set() + self._bynweekday = set() for wday in byweekday: if isinstance(wday, integer_types): - self._byweekday.append(wday) + self._byweekday.add(wday) elif not wday.n or freq > MONTHLY: - self._byweekday.append(wday.weekday) + self._byweekday.add(wday.weekday) else: - self._bynweekday.append((wday.weekday, wday.n)) - self._byweekday = tuple(self._byweekday) - self._bynweekday = tuple(self._bynweekday) + self._bynweekday.add((wday.weekday, wday.n)) + if not self._byweekday: self._byweekday = None elif not self._bynweekday: self._bynweekday = None + # byhour if byhour is None: if freq < HOURLY: - self._byhour = (dtstart.hour,) + self._byhour = set((dtstart.hour,)) else: self._byhour = None - elif isinstance(byhour, integer_types): - self._byhour = (byhour,) else: - self._byhour = tuple(byhour) + if isinstance(byhour, integer_types): + byhour = (byhour,) + + if freq == HOURLY: + self._byhour = self.__construct_byset(start=dtstart.hour, + byxxx=byhour, + base=24) + else: + self._byhour = set(byhour) + # byminute if byminute is None: if freq < MINUTELY: - self._byminute = (dtstart.minute,) + self._byminute = set((dtstart.minute,)) else: self._byminute = None - elif isinstance(byminute, integer_types): - self._byminute = (byminute,) else: - self._byminute = tuple(byminute) + if isinstance(byminute, integer_types): + byminute = (byminute,) + + if freq == MINUTELY: + self._byminute = self.__construct_byset(start=dtstart.minute, + byxxx=byminute, + base=60) + else: + self._byminute = set(byminute) + # bysecond if bysecond is None: if freq < SECONDLY: - self._bysecond = (dtstart.second,) + self._bysecond = ((dtstart.second,)) else: self._bysecond = None - elif isinstance(bysecond, integer_types): - self._bysecond = (bysecond,) else: - self._bysecond = tuple(bysecond) + if isinstance(bysecond, integer_types): + bysecond = (bysecond,) + + self._bysecond = set(bysecond) + + if freq == SECONDLY: + self._bysecond = self.__construct_byset(start=dtstart.second, + byxxx=bysecond, + base=60) + else: + self._bysecond = set(bysecond) if self._freq >= HOURLY: self._timeset = None @@ -395,8 +520,8 @@ class rrule(rrulebase): for minute in self._byminute: for second in self._bysecond: self._timeset.append( - datetime.time(hour, minute, second, - tzinfo=self._tzinfo)) + datetime.time(hour, minute, second, + tzinfo=self._tzinfo)) self._timeset.sort() self._timeset = tuple(self._timeset) @@ -424,20 +549,20 @@ class rrule(rrulebase): ii = _iterinfo(self) ii.rebuild(year, month) - getdayset = {YEARLY:ii.ydayset, - MONTHLY:ii.mdayset, - WEEKLY:ii.wdayset, - DAILY:ii.ddayset, - HOURLY:ii.ddayset, - MINUTELY:ii.ddayset, - SECONDLY:ii.ddayset}[freq] - + getdayset = {YEARLY: ii.ydayset, + MONTHLY: ii.mdayset, + WEEKLY: ii.wdayset, + DAILY: ii.ddayset, + HOURLY: ii.ddayset, + MINUTELY: ii.ddayset, + SECONDLY: ii.ddayset}[freq] + if freq < HOURLY: timeset = self._timeset else: - gettimeset = {HOURLY:ii.htimeset, - MINUTELY:ii.mtimeset, - SECONDLY:ii.stimeset}[freq] + gettimeset = {HOURLY: 
ii.htimeset, + MINUTELY: ii.mtimeset, + SECONDLY: ii.stimeset}[freq] if ((freq >= HOURLY and self._byhour and hour not in self._byhour) or (freq >= MINUTELY and @@ -466,11 +591,10 @@ class rrule(rrulebase): ii.mdaymask[i] not in bymonthday and ii.nmdaymask[i] not in bynmonthday) or (byyearday and - ((i < ii.yearlen and i+1 not in byyearday - and -ii.yearlen+i not in byyearday) or - (i >= ii.yearlen and i+1-ii.yearlen not in byyearday - and -ii.nextyearlen+i-ii.yearlen - not in byyearday)))): + ((i < ii.yearlen and i+1 not in byyearday and + -ii.yearlen+i not in byyearday) or + (i >= ii.yearlen and i+1-ii.yearlen not in byyearday and + -ii.nextyearlen+i-ii.yearlen not in byyearday)))): dayset[i] = None filtered = True @@ -484,7 +608,7 @@ class rrule(rrulebase): daypos, timepos = divmod(pos-1, len(timeset)) try: i = [x for x in dayset[start:end] - if x is not None][daypos] + if x is not None][daypos] time = timeset[timepos] except IndexError: pass @@ -559,60 +683,86 @@ class rrule(rrulebase): if filtered: # Jump to one iteration before next day hour += ((23-hour)//interval)*interval - while True: - hour += interval - div, mod = divmod(hour, 24) - if div: - hour = mod - day += div - fixday = True - if not byhour or hour in byhour: - break + + if byhour: + ndays, hour = self.__mod_distance(value=hour, + byxxx=self._byhour, + base=24) + else: + ndays, hour = divmod(hour+interval, 24) + + if ndays: + day += ndays + fixday = True + timeset = gettimeset(hour, minute, second) elif freq == MINUTELY: if filtered: # Jump to one iteration before next day minute += ((1439-(hour*60+minute))//interval)*interval - while True: - minute += interval - div, mod = divmod(minute, 60) + + valid = False + rep_rate = (24*60) + for j in range(rep_rate // gcd(interval, rep_rate)): + if byminute: + nhours, minute = \ + self.__mod_distance(value=minute, + byxxx=self._byminute, + base=60) + else: + nhours, minute = divmod(minute+interval, 60) + + div, hour = divmod(hour+nhours, 24) if div: - minute = mod - hour += div - div, mod = divmod(hour, 24) - if div: - hour = mod - day += div - fixday = True - filtered = False - if ((not byhour or hour in byhour) and - (not byminute or minute in byminute)): + day += div + fixday = True + filtered = False + + if not byhour or hour in byhour: + valid = True break + + if not valid: + raise ValueError('Invalid combination of interval and ' + + 'byhour resulting in empty rule.') + timeset = gettimeset(hour, minute, second) elif freq == SECONDLY: if filtered: # Jump to one iteration before next day second += (((86399-(hour*3600+minute*60+second)) - //interval)*interval) - while True: - second += self._interval - div, mod = divmod(second, 60) + // interval)*interval) + + rep_rate = (24*3600) + valid = False + for j in range(0, rep_rate // gcd(interval, rep_rate)): + if bysecond: + nminutes, second = \ + self.__mod_distance(value=second, + byxxx=self._bysecond, + base=60) + else: + nminutes, second = divmod(second+interval, 60) + + div, minute = divmod(minute+nminutes, 60) if div: - second = mod - minute += div - div, mod = divmod(minute, 60) + hour += div + div, hour = divmod(hour, 24) if div: - minute = mod - hour += div - div, mod = divmod(hour, 24) - if div: - hour = mod - day += div - fixday = True + day += div + fixday = True + if ((not byhour or hour in byhour) and - (not byminute or minute in byminute) and - (not bysecond or second in bysecond)): + (not byminute or minute in byminute) and + (not bysecond or second in bysecond)): + valid = True break + + if not valid: + raise 
ValueError('Invalid combination of interval, ' + 'byhour and byminute resulting in empty' + ' rule.') + timeset = gettimeset(hour, minute, second) if fixday and day > 28: @@ -630,6 +780,80 @@ class rrule(rrulebase): daysinmonth = calendar.monthrange(year, month)[1] ii.rebuild(year, month) + def __construct_byset(self, start, byxxx, base): + """ + If a `BYXXX` sequence is passed to the constructor at the same level as + `FREQ` (e.g. `FREQ=HOURLY,BYHOUR={2,4,7},INTERVAL=3`), there are some + specifications which cannot be reached given some starting conditions. + + This occurs whenever the interval is not coprime with the base of a + given unit and the difference between the starting position and the + ending position is not divisible by the greatest common divisor + of the interval and the base. For example, with a FREQ of hourly + starting at 17:00 and an interval of 4, the only valid values for + BYHOUR would be {21, 1, 5, 9, 13, 17}, because 4 and 24 are not + coprime. + + :param:`start` specifies the starting position. + :param:`byxxx` is an iterable containing the list of allowed values. + :param:`base` is the largest allowable value for the specified + frequency (e.g. 24 hours, 60 minutes). + + This does not preserve the type of the iterable, returning a set, since + the values should be unique and the order is irrelevant; this will + speed up later lookups. + + In the event of an empty set, raises a :exception:`ValueError`, as this + results in an empty rrule. + """ + + cset = set() + + # Support a single byxxx value. + if isinstance(byxxx, integer_types): + byxxx = (byxxx,) + + for num in byxxx: + i_gcd = gcd(self._interval, base) + # Use divmod rather than % because we need to wrap negative nums. + if i_gcd == 1 or divmod(num - start, i_gcd)[1] == 0: + cset.add(num) + + if len(cset) == 0: + raise ValueError("Invalid rrule byxxx generates an empty set.") + + return cset + + def __mod_distance(self, value, byxxx, base): + """ + Calculates the next value in a sequence where the `FREQ` parameter is + specified along with a `BYXXX` parameter at the same "level" + (e.g. `HOURLY` specified with `BYHOUR`). + + :param:`value` is the old value of the component. + :param:`byxxx` is the `BYXXX` set, which should have been generated + by `rrule._construct_byset`, or something else which + checks that a valid rule is present. + :param:`base` is the largest allowable value for the specified + frequency (e.g. 24 hours, 60 minutes). + + If a valid value is not found after `base` iterations (the maximum + number before the sequence would start to repeat), this raises a + :exception:`ValueError`, as no valid values were found. + + This returns a tuple of `divmod(n*interval, base)`, where `n` is the + smallest number of `interval` repetitions until the next specified + value in `byxxx` is found.
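Concretely, for the 17:00 / interval-4 case in the docstring above, only hours whose difference from 17 is divisible by gcd(4, 24) are reachable, and anything else now fails at construction time. A short sketch, assuming the upstream dateutil 2.4 behaviour:

    from datetime import datetime
    from dateutil.rrule import rrule, HOURLY

    start = datetime(2015, 1, 1, 17)

    # 21, 1 and 5 are all reachable from 17:00 in 4-hour steps (mod 24):
    print(list(rrule(HOURLY, interval=4, byhour=(21, 1, 5),
                     dtstart=start, count=3)))

    # Hour 2 is not reachable, so __construct_byset produces an empty set:
    try:
        rrule(HOURLY, interval=4, byhour=(2,), dtstart=start)
    except ValueError as e:
        print(e)   # Invalid rrule byxxx generates an empty set.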
+ """ + accumulator = 0 + for ii in range(1, base + 1): + # Using divmod() over % to account for negative intervals + div, value = divmod(value + self._interval, base) + accumulator += div + if value in byxxx: + return (accumulator, value) + + class _iterinfo(object): __slots__ = ["rrule", "lastyear", "lastmonth", "yearlen", "nextyearlen", "yearordinal", "yearweekday", @@ -669,13 +893,13 @@ class _iterinfo(object): self.wnomask = None else: self.wnomask = [0]*(self.yearlen+7) - #no1wkst = firstwkst = self.wdaymask.index(rr._wkst) - no1wkst = firstwkst = (7-self.yearweekday+rr._wkst)%7 + # no1wkst = firstwkst = self.wdaymask.index(rr._wkst) + no1wkst = firstwkst = (7-self.yearweekday+rr._wkst) % 7 if no1wkst >= 4: no1wkst = 0 # Number of days in the year, plus the days we got # from last year. - wyearlen = self.yearlen+(self.yearweekday-rr._wkst)%7 + wyearlen = self.yearlen+(self.yearweekday-rr._wkst) % 7 else: # Number of days in the year, minus the days we # left in last year. @@ -721,22 +945,22 @@ class _iterinfo(object): # this year. if -1 not in rr._byweekno: lyearweekday = datetime.date(year-1, 1, 1).weekday() - lno1wkst = (7-lyearweekday+rr._wkst)%7 + lno1wkst = (7-lyearweekday+rr._wkst) % 7 lyearlen = 365+calendar.isleap(year-1) if lno1wkst >= 4: lno1wkst = 0 - lnumweeks = 52+(lyearlen+ - (lyearweekday-rr._wkst)%7)%7//4 + lnumweeks = 52+(lyearlen + + (lyearweekday-rr._wkst) % 7) % 7//4 else: - lnumweeks = 52+(self.yearlen-no1wkst)%7//4 + lnumweeks = 52+(self.yearlen-no1wkst) % 7//4 else: lnumweeks = -1 if lnumweeks in rr._byweekno: for i in range(no1wkst): self.wnomask[i] = 1 - if (rr._bynweekday and - (month != self.lastmonth or year != self.lastyear)): + if (rr._bynweekday and (month != self.lastmonth or + year != self.lastyear)): ranges = [] if rr._freq == YEARLY: if rr._bymonth: @@ -755,10 +979,10 @@ class _iterinfo(object): for wday, n in rr._bynweekday: if n < 0: i = last+(n+1)*7 - i -= (self.wdaymask[i]-wday)%7 + i -= (self.wdaymask[i]-wday) % 7 else: i = first+(n-1)*7 - i += (7-self.wdaymask[i]+wday)%7 + i += (7-self.wdaymask[i]+wday) % 7 if first <= i <= last: self.nwdaymask[i] = 1 @@ -775,50 +999,50 @@ class _iterinfo(object): return list(range(self.yearlen)), 0, self.yearlen def mdayset(self, year, month, day): - set = [None]*self.yearlen + dset = [None]*self.yearlen start, end = self.mrange[month-1:month+1] for i in range(start, end): - set[i] = i - return set, start, end + dset[i] = i + return dset, start, end def wdayset(self, year, month, day): # We need to handle cross-year weeks here. - set = [None]*(self.yearlen+7) + dset = [None]*(self.yearlen+7) i = datetime.date(year, month, day).toordinal()-self.yearordinal start = i for j in range(7): - set[i] = i + dset[i] = i i += 1 - #if (not (0 <= i < self.yearlen) or + # if (not (0 <= i < self.yearlen) or # self.wdaymask[i] == self.rrule._wkst): # This will cross the year boundary, if necessary. 
if self.wdaymask[i] == self.rrule._wkst: break - return set, start, i + return dset, start, i def ddayset(self, year, month, day): - set = [None]*self.yearlen + dset = [None]*self.yearlen i = datetime.date(year, month, day).toordinal()-self.yearordinal - set[i] = i - return set, i, i+1 + dset[i] = i + return dset, i, i+1 def htimeset(self, hour, minute, second): - set = [] + tset = [] rr = self.rrule for minute in rr._byminute: for second in rr._bysecond: - set.append(datetime.time(hour, minute, second, + tset.append(datetime.time(hour, minute, second, tzinfo=rr._tzinfo)) - set.sort() - return set + tset.sort() + return tset def mtimeset(self, hour, minute, second): - set = [] + tset = [] rr = self.rrule for second in rr._bysecond: - set.append(datetime.time(hour, minute, second, tzinfo=rr._tzinfo)) - set.sort() - return set + tset.append(datetime.time(hour, minute, second, tzinfo=rr._tzinfo)) + tset.sort() + return tset def stimeset(self, hour, minute, second): return (datetime.time(hour, minute, second, @@ -826,6 +1050,12 @@ class _iterinfo(object): class rruleset(rrulebase): + """ The rruleset type allows more complex recurrence setups, mixing + multiple rules, dates, exclusion rules, and exclusion dates. The type + constructor takes the following keyword arguments: + + :param cache: If True, caching of results will be enabled, improving + performance of multiple queries considerably. """ class _genitem(object): def __init__(self, genlist, gen): @@ -865,15 +1095,26 @@ class rruleset(rrulebase): self._exdate = [] def rrule(self, rrule): + """ Include the given :py:class:`rrule` instance in the recurrence set + generation. """ self._rrule.append(rrule) def rdate(self, rdate): + """ Include the given :py:class:`datetime` instance in the recurrence + set generation. """ self._rdate.append(rdate) def exrule(self, exrule): + """ Include the given rrule instance in the recurrence set exclusion + list. Dates which are part of the given recurrence rules will not + be generated, even if some inclusive rrule or rdate matches them. + """ self._exrule.append(exrule) def exdate(self, exdate): + """ Include the given datetime instance in the recurrence set + exclusion list. Dates included that way will not be generated, + even if some inclusive rrule or rdate matches them. 
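Putting the four methods documented above together in the usual pattern (dates illustrative):

    from datetime import datetime
    from dateutil.rrule import rrule, rruleset, DAILY

    rset = rruleset()
    rset.rrule(rrule(DAILY, count=5, dtstart=datetime(2015, 1, 1)))
    rset.exdate(datetime(2015, 1, 3))   # dropped even though the rrule hits it
    print(list(rset))                   # Jan 1, 2, 4 and 5 only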
""" self._exdate.append(exdate) def _iter(self): @@ -905,6 +1146,7 @@ class rruleset(rrulebase): rlist.sort() self._len = total + class _rrulestr(object): _freq_map = {"YEARLY": YEARLY, @@ -915,7 +1157,8 @@ class _rrulestr(object): "MINUTELY": MINUTELY, "SECONDLY": SECONDLY} - _weekday_map = {"MO":0,"TU":1,"WE":2,"TH":3,"FR":4,"SA":5,"SU":6} + _weekday_map = {"MO": 0, "TU": 1, "WE": 2, "TH": 3, + "FR": 4, "SA": 5, "SU": 6} def _handle_int(self, rrkwargs, name, value, **kwargs): rrkwargs[name.lower()] = int(value) @@ -923,17 +1166,17 @@ class _rrulestr(object): def _handle_int_list(self, rrkwargs, name, value, **kwargs): rrkwargs[name.lower()] = [int(x) for x in value.split(',')] - _handle_INTERVAL = _handle_int - _handle_COUNT = _handle_int - _handle_BYSETPOS = _handle_int_list - _handle_BYMONTH = _handle_int_list + _handle_INTERVAL = _handle_int + _handle_COUNT = _handle_int + _handle_BYSETPOS = _handle_int_list + _handle_BYMONTH = _handle_int_list _handle_BYMONTHDAY = _handle_int_list - _handle_BYYEARDAY = _handle_int_list - _handle_BYEASTER = _handle_int_list - _handle_BYWEEKNO = _handle_int_list - _handle_BYHOUR = _handle_int_list - _handle_BYMINUTE = _handle_int_list - _handle_BYSECOND = _handle_int_list + _handle_BYYEARDAY = _handle_int_list + _handle_BYEASTER = _handle_int_list + _handle_BYWEEKNO = _handle_int_list + _handle_BYHOUR = _handle_int_list + _handle_BYMINUTE = _handle_int_list + _handle_BYSECOND = _handle_int_list def _handle_FREQ(self, rrkwargs, name, value, **kwargs): rrkwargs["freq"] = self._freq_map[value] @@ -944,8 +1187,8 @@ class _rrulestr(object): from dateutil import parser try: rrkwargs["until"] = parser.parse(value, - ignoretz=kwargs.get("ignoretz"), - tzinfos=kwargs.get("tzinfos")) + ignoretz=kwargs.get("ignoretz"), + tzinfos=kwargs.get("tzinfos")) except ValueError: raise ValueError("invalid until date") @@ -960,7 +1203,8 @@ class _rrulestr(object): break n = wday[:i] or None w = wday[i:] - if n: n = int(n) + if n: + n = int(n) l.append(weekdays[self._weekday_map[w]](n)) rrkwargs["byweekday"] = l @@ -1021,8 +1265,8 @@ class _rrulestr(object): i += 1 else: lines = s.split() - if (not forceset and len(lines) == 1 and - (s.find(':') == -1 or s.startswith('RRULE:'))): + if (not forceset and len(lines) == 1 and (s.find(':') == -1 or + s.startswith('RRULE:'))): return self._parse_rfc_rrule(lines[0], cache=cache, dtstart=dtstart, ignoretz=ignoretz, tzinfos=tzinfos) @@ -1071,32 +1315,32 @@ class _rrulestr(object): tzinfos=tzinfos) else: raise ValueError("unsupported property: "+name) - if (forceset or len(rrulevals) > 1 or - rdatevals or exrulevals or exdatevals): + if (forceset or len(rrulevals) > 1 or rdatevals + or exrulevals or exdatevals): if not parser and (rdatevals or exdatevals): from dateutil import parser - set = rruleset(cache=cache) + rset = rruleset(cache=cache) for value in rrulevals: - set.rrule(self._parse_rfc_rrule(value, dtstart=dtstart, - ignoretz=ignoretz, - tzinfos=tzinfos)) - for value in rdatevals: - for datestr in value.split(','): - set.rdate(parser.parse(datestr, - ignoretz=ignoretz, - tzinfos=tzinfos)) - for value in exrulevals: - set.exrule(self._parse_rfc_rrule(value, dtstart=dtstart, + rset.rrule(self._parse_rfc_rrule(value, dtstart=dtstart, ignoretz=ignoretz, tzinfos=tzinfos)) - for value in exdatevals: + for value in rdatevals: for datestr in value.split(','): - set.exdate(parser.parse(datestr, + rset.rdate(parser.parse(datestr, ignoretz=ignoretz, tzinfos=tzinfos)) + for value in exrulevals: + rset.exrule(self._parse_rfc_rrule(value, 
dtstart=dtstart, + ignoretz=ignoretz, + tzinfos=tzinfos)) + for value in exdatevals: + for datestr in value.split(','): + rset.exdate(parser.parse(datestr, + ignoretz=ignoretz, + tzinfos=tzinfos)) if compatible and dtstart: - set.rdate(dtstart) - return set + rset.rdate(dtstart) + return rset else: return self._parse_rfc_rrule(rrulevals[0], dtstart=dtstart, diff --git a/awx/lib/site-packages/dateutil/tz.py b/awx/lib/site-packages/dateutil/tz.py index e849fc24b5..31879e8be9 100644 --- a/awx/lib/site-packages/dateutil/tz.py +++ b/awx/lib/site-packages/dateutil/tz.py @@ -1,19 +1,25 @@ +# -*- coding: utf-8 -*- """ -Copyright (c) 2003-2007 Gustavo Niemeyer <gustavo@niemeyer.net> - -This module offers extensions to the standard Python -datetime module. +This module offers timezone implementations subclassing the abstract +:py:`datetime.tzinfo` type. There are classes to handle tzfile format files +(usually are in :file:`/etc/localtime`, :file:`/usr/share/zoneinfo`, etc), TZ +environment string (in all known formats), given ranges (with help from +relative deltas), local machine timezone, fixed offset timezone, and UTC +timezone. """ -__license__ = "Simplified BSD" - -from six import string_types, PY3 - import datetime import struct import time import sys import os +from six import string_types, PY3 + +try: + from dateutil.tzwin import tzwin, tzwinlocal +except ImportError: + tzwin = tzwinlocal = None + relativedelta = None parser = None rrule = None @@ -21,10 +27,6 @@ rrule = None __all__ = ["tzutc", "tzoffset", "tzlocal", "tzfile", "tzrange", "tzstr", "tzical", "tzwin", "tzwinlocal", "gettz"] -try: - from dateutil.tzwin import tzwin, tzwinlocal -except (ImportError, OSError): - tzwin, tzwinlocal = None, None def tzname_in_python2(myfunc): """Change unicode output into bytestrings in Python 2 @@ -42,11 +44,12 @@ def tzname_in_python2(myfunc): ZERO = datetime.timedelta(0) EPOCHORDINAL = datetime.datetime.utcfromtimestamp(0).toordinal() + class tzutc(datetime.tzinfo): def utcoffset(self, dt): return ZERO - + def dst(self, dt): return ZERO @@ -66,6 +69,7 @@ class tzutc(datetime.tzinfo): __reduce__ = object.__reduce__ + class tzoffset(datetime.tzinfo): def __init__(self, name, offset): @@ -96,6 +100,7 @@ class tzoffset(datetime.tzinfo): __reduce__ = object.__reduce__ + class tzlocal(datetime.tzinfo): _std_offset = datetime.timedelta(seconds=-time.timezone) @@ -123,25 +128,25 @@ class tzlocal(datetime.tzinfo): def _isdst(self, dt): # We can't use mktime here. It is unstable when deciding if # the hour near to a change is DST or not. 
- # + # # timestamp = time.mktime((dt.year, dt.month, dt.day, dt.hour, # dt.minute, dt.second, dt.weekday(), 0, -1)) # return time.localtime(timestamp).tm_isdst # # The code above yields the following result: # - #>>> import tz, datetime - #>>> t = tz.tzlocal() - #>>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() - #'BRDT' - #>>> datetime.datetime(2003,2,16,0,tzinfo=t).tzname() - #'BRST' - #>>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() - #'BRST' - #>>> datetime.datetime(2003,2,15,22,tzinfo=t).tzname() - #'BRDT' - #>>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() - #'BRDT' + # >>> import tz, datetime + # >>> t = tz.tzlocal() + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRDT' + # >>> datetime.datetime(2003,2,16,0,tzinfo=t).tzname() + # 'BRST' + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRST' + # >>> datetime.datetime(2003,2,15,22,tzinfo=t).tzname() + # 'BRDT' + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRDT' # # Here is a more stable implementation: # @@ -166,6 +171,7 @@ class tzlocal(datetime.tzinfo): __reduce__ = object.__reduce__ + class _ttinfo(object): __slots__ = ["offset", "delta", "isdst", "abbr", "isstd", "isgmt"] @@ -205,15 +211,20 @@ class _ttinfo(object): if name in state: setattr(self, name, state[name]) + class tzfile(datetime.tzinfo): # http://www.twinsun.com/tz/tz-link.htm # ftp://ftp.iana.org/tz/tz*.tar.gz - - def __init__(self, fileobj): + + def __init__(self, fileobj, filename=None): + file_opened_here = False if isinstance(fileobj, string_types): self._filename = fileobj fileobj = open(fileobj, 'rb') + file_opened_here = True + elif filename is not None: + self._filename = filename elif hasattr(fileobj, "name"): self._filename = fileobj.name else: @@ -228,125 +239,128 @@ class tzfile(datetime.tzinfo): # six four-byte values of type long, written in a # ``standard'' byte order (the high-order byte # of the value is written first). + try: + if fileobj.read(4).decode() != "TZif": + raise ValueError("magic not found") - if fileobj.read(4).decode() != "TZif": - raise ValueError("magic not found") + fileobj.read(16) - fileobj.read(16) + ( + # The number of UTC/local indicators stored in the file. + ttisgmtcnt, - ( - # The number of UTC/local indicators stored in the file. - ttisgmtcnt, + # The number of standard/wall indicators stored in the file. + ttisstdcnt, - # The number of standard/wall indicators stored in the file. - ttisstdcnt, - - # The number of leap seconds for which data is - # stored in the file. - leapcnt, + # The number of leap seconds for which data is + # stored in the file. + leapcnt, - # The number of "transition times" for which data - # is stored in the file. - timecnt, + # The number of "transition times" for which data + # is stored in the file. + timecnt, - # The number of "local time types" for which data - # is stored in the file (must not be zero). - typecnt, + # The number of "local time types" for which data + # is stored in the file (must not be zero). + typecnt, - # The number of characters of "time zone - # abbreviation strings" stored in the file. - charcnt, + # The number of characters of "time zone + # abbreviation strings" stored in the file. + charcnt, - ) = struct.unpack(">6l", fileobj.read(24)) + ) = struct.unpack(">6l", fileobj.read(24)) - # The above header is followed by tzh_timecnt four-byte - # values of type long, sorted in ascending order. - # These values are written in ``standard'' byte order. 
- # Each is used as a transition time (as returned by - # time(2)) at which the rules for computing local time - # change. + # The above header is followed by tzh_timecnt four-byte + # values of type long, sorted in ascending order. + # These values are written in ``standard'' byte order. + # Each is used as a transition time (as returned by + # time(2)) at which the rules for computing local time + # change. - if timecnt: - self._trans_list = struct.unpack(">%dl" % timecnt, - fileobj.read(timecnt*4)) - else: - self._trans_list = [] + if timecnt: + self._trans_list = struct.unpack(">%dl" % timecnt, + fileobj.read(timecnt*4)) + else: + self._trans_list = [] - # Next come tzh_timecnt one-byte values of type unsigned - # char; each one tells which of the different types of - # ``local time'' types described in the file is associated - # with the same-indexed transition time. These values - # serve as indices into an array of ttinfo structures that - # appears next in the file. - - if timecnt: - self._trans_idx = struct.unpack(">%dB" % timecnt, - fileobj.read(timecnt)) - else: - self._trans_idx = [] - - # Each ttinfo structure is written as a four-byte value - # for tt_gmtoff of type long, in a standard byte - # order, followed by a one-byte value for tt_isdst - # and a one-byte value for tt_abbrind. In each - # structure, tt_gmtoff gives the number of - # seconds to be added to UTC, tt_isdst tells whether - # tm_isdst should be set by localtime(3), and - # tt_abbrind serves as an index into the array of - # time zone abbreviation characters that follow the - # ttinfo structure(s) in the file. + # Next come tzh_timecnt one-byte values of type unsigned + # char; each one tells which of the different types of + # ``local time'' types described in the file is associated + # with the same-indexed transition time. These values + # serve as indices into an array of ttinfo structures that + # appears next in the file. - ttinfo = [] + if timecnt: + self._trans_idx = struct.unpack(">%dB" % timecnt, + fileobj.read(timecnt)) + else: + self._trans_idx = [] - for i in range(typecnt): - ttinfo.append(struct.unpack(">lbb", fileobj.read(6))) + # Each ttinfo structure is written as a four-byte value + # for tt_gmtoff of type long, in a standard byte + # order, followed by a one-byte value for tt_isdst + # and a one-byte value for tt_abbrind. In each + # structure, tt_gmtoff gives the number of + # seconds to be added to UTC, tt_isdst tells whether + # tm_isdst should be set by localtime(3), and + # tt_abbrind serves as an index into the array of + # time zone abbreviation characters that follow the + # ttinfo structure(s) in the file. - abbr = fileobj.read(charcnt).decode() + ttinfo = [] - # Then there are tzh_leapcnt pairs of four-byte - # values, written in standard byte order; the - # first value of each pair gives the time (as - # returned by time(2)) at which a leap second - # occurs; the second gives the total number of - # leap seconds to be applied after the given time. - # The pairs of values are sorted in ascending order - # by time. 
+ for i in range(typecnt): + ttinfo.append(struct.unpack(">lbb", fileobj.read(6))) - # Not used, for now - if leapcnt: - leap = struct.unpack(">%dl" % (leapcnt*2), - fileobj.read(leapcnt*8)) + abbr = fileobj.read(charcnt).decode() - # Then there are tzh_ttisstdcnt standard/wall - # indicators, each stored as a one-byte value; - # they tell whether the transition times associated - # with local time types were specified as standard - # time or wall clock time, and are used when - # a time zone file is used in handling POSIX-style - # time zone environment variables. + # Then there are tzh_leapcnt pairs of four-byte + # values, written in standard byte order; the + # first value of each pair gives the time (as + # returned by time(2)) at which a leap second + # occurs; the second gives the total number of + # leap seconds to be applied after the given time. + # The pairs of values are sorted in ascending order + # by time. - if ttisstdcnt: - isstd = struct.unpack(">%db" % ttisstdcnt, - fileobj.read(ttisstdcnt)) + # Not used, for now + # if leapcnt: + # leap = struct.unpack(">%dl" % (leapcnt*2), + # fileobj.read(leapcnt*8)) - # Finally, there are tzh_ttisgmtcnt UTC/local - # indicators, each stored as a one-byte value; - # they tell whether the transition times associated - # with local time types were specified as UTC or - # local time, and are used when a time zone file - # is used in handling POSIX-style time zone envi- - # ronment variables. + # Then there are tzh_ttisstdcnt standard/wall + # indicators, each stored as a one-byte value; + # they tell whether the transition times associated + # with local time types were specified as standard + # time or wall clock time, and are used when + # a time zone file is used in handling POSIX-style + # time zone environment variables. - if ttisgmtcnt: - isgmt = struct.unpack(">%db" % ttisgmtcnt, - fileobj.read(ttisgmtcnt)) + if ttisstdcnt: + isstd = struct.unpack(">%db" % ttisstdcnt, + fileobj.read(ttisstdcnt)) - # ** Everything has been read ** + # Finally, there are tzh_ttisgmtcnt UTC/local + # indicators, each stored as a one-byte value; + # they tell whether the transition times associated + # with local time types were specified as UTC or + # local time, and are used when a time zone file + # is used in handling POSIX-style time zone envi- + # ronment variables. + + if ttisgmtcnt: + isgmt = struct.unpack(">%db" % ttisgmtcnt, + fileobj.read(ttisgmtcnt)) + + # ** Everything has been read ** + finally: + if file_opened_here: + fileobj.close() # Build ttinfo list self._ttinfo_list = [] for i in range(typecnt): - gmtoff, isdst, abbrind = ttinfo[i] + gmtoff, isdst, abbrind = ttinfo[i] # Round to full-minutes if that's not the case. Python's # datetime doesn't accept sub-minute timezones. Check # http://python.org/sf/1447945 for some information. @@ -464,7 +478,7 @@ class tzfile(datetime.tzinfo): # However, this class stores historical changes in the # dst offset, so I belive that this wouldn't be the right # way to implement this. 
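The binary layout walked through in the comments above boils down to a few struct.unpack calls, and the parsed transitions then surface through the ordinary tzinfo interface. A standalone sketch (the zoneinfo path assumes a typical Linux layout):

    import struct
    from datetime import datetime
    from dateutil import tz

    # Read just the TZif header: magic, 16 reserved bytes, six 32-bit counts.
    with open("/usr/share/zoneinfo/America/New_York", "rb") as f:
        if f.read(4) != b"TZif":
            raise ValueError("magic not found")
        f.read(16)
        (ttisgmtcnt, ttisstdcnt, leapcnt,
         timecnt, typecnt, charcnt) = struct.unpack(">6l", f.read(24))
    print(timecnt, "transitions,", typecnt, "local time types")

    # The same data consumed through tzfile/gettz:
    eastern = tz.gettz("America/New_York")
    print(datetime(2014, 7, 1, tzinfo=eastern).tzname())   # EDT
    print(datetime(2014, 1, 1, tzinfo=eastern).tzname())   # EST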
- + @tzname_in_python2 def tzname(self, dt): if not self._ttinfo_std: @@ -481,7 +495,6 @@ class tzfile(datetime.tzinfo): def __ne__(self, other): return not self.__eq__(other) - def __repr__(self): return "%s(%s)" % (self.__class__.__name__, repr(self._filename)) @@ -490,8 +503,8 @@ class tzfile(datetime.tzinfo): raise ValueError("Unpickable %s class" % self.__class__.__name__) return (self.__class__, (self._filename,)) -class tzrange(datetime.tzinfo): +class tzrange(datetime.tzinfo): def __init__(self, stdabbr, stdoffset=None, dstabbr=None, dstoffset=None, start=None, end=None): @@ -512,12 +525,12 @@ class tzrange(datetime.tzinfo): self._dst_offset = ZERO if dstabbr and start is None: self._start_delta = relativedelta.relativedelta( - hours=+2, month=4, day=1, weekday=relativedelta.SU(+1)) + hours=+2, month=4, day=1, weekday=relativedelta.SU(+1)) else: self._start_delta = start if dstabbr and end is None: self._end_delta = relativedelta.relativedelta( - hours=+1, month=10, day=31, weekday=relativedelta.SU(-1)) + hours=+1, month=10, day=31, weekday=relativedelta.SU(-1)) else: self._end_delta = end @@ -570,8 +583,9 @@ class tzrange(datetime.tzinfo): __reduce__ = object.__reduce__ + class tzstr(tzrange): - + def __init__(self, s): global parser if not parser: @@ -645,9 +659,10 @@ class tzstr(tzrange): def __repr__(self): return "%s(%s)" % (self.__class__.__name__, repr(self._s)) + class _tzicalvtzcomp(object): def __init__(self, tzoffsetfrom, tzoffsetto, isdst, - tzname=None, rrule=None): + tzname=None, rrule=None): self.tzoffsetfrom = datetime.timedelta(seconds=tzoffsetfrom) self.tzoffsetto = datetime.timedelta(seconds=tzoffsetto) self.tzoffsetdiff = self.tzoffsetto-self.tzoffsetfrom @@ -655,6 +670,7 @@ class _tzicalvtzcomp(object): self.tzname = tzname self.rrule = rrule + class _tzicalvtz(datetime.tzinfo): def __init__(self, tzid, comps=[]): self._tzid = tzid @@ -718,6 +734,7 @@ class _tzicalvtz(datetime.tzinfo): __reduce__ = object.__reduce__ + class tzical(object): def __init__(self, fileobj): global rrule @@ -726,7 +743,8 @@ class tzical(object): if isinstance(fileobj, string_types): self._s = fileobj - fileobj = open(fileobj, 'r') # ical should be encoded in UTF-8 with CRLF + # ical should be encoded in UTF-8 with CRLF + fileobj = open(fileobj, 'r') elif hasattr(fileobj, "name"): self._s = fileobj.name else: @@ -754,7 +772,7 @@ class tzical(object): if not s: raise ValueError("empty offset") if s[0] in ('+', '-'): - signal = (-1, +1)[s[0]=='+'] + signal = (-1, +1)[s[0] == '+'] s = s[1:] else: signal = +1 @@ -815,7 +833,8 @@ class tzical(object): if not tzid: raise ValueError("mandatory TZID not found") if not comps: - raise ValueError("at least one component is needed") + raise ValueError( + "at least one component is needed") # Process vtimezone self._vtz[tzid] = _tzicalvtz(tzid, comps) invtz = False @@ -823,9 +842,11 @@ class tzical(object): if not founddtstart: raise ValueError("mandatory DTSTART not found") if tzoffsetfrom is None: - raise ValueError("mandatory TZOFFSETFROM not found") + raise ValueError( + "mandatory TZOFFSETFROM not found") if tzoffsetto is None: - raise ValueError("mandatory TZOFFSETFROM not found") + raise ValueError( + "mandatory TZOFFSETFROM not found") # Process component rr = None if rrulelines: @@ -848,15 +869,18 @@ class tzical(object): rrulelines.append(line) elif name == "TZOFFSETFROM": if parms: - raise ValueError("unsupported %s parm: %s "%(name, parms[0])) + raise ValueError( + "unsupported %s parm: %s " % (name, parms[0])) tzoffsetfrom = 
self._parse_offset(value) elif name == "TZOFFSETTO": if parms: - raise ValueError("unsupported TZOFFSETTO parm: "+parms[0]) + raise ValueError( + "unsupported TZOFFSETTO parm: "+parms[0]) tzoffsetto = self._parse_offset(value) elif name == "TZNAME": if parms: - raise ValueError("unsupported TZNAME parm: "+parms[0]) + raise ValueError( + "unsupported TZNAME parm: "+parms[0]) tzname = value elif name == "COMMENT": pass @@ -865,7 +889,8 @@ class tzical(object): else: if name == "TZID": if parms: - raise ValueError("unsupported TZID parm: "+parms[0]) + raise ValueError( + "unsupported TZID parm: "+parms[0]) tzid = value elif name in ("TZURL", "LAST-MODIFIED", "COMMENT"): pass @@ -886,6 +911,7 @@ else: TZFILES = [] TZPATHS = [] + def gettz(name=None): tz = None if not name: @@ -933,11 +959,11 @@ def gettz(name=None): pass else: tz = None - if tzwin: + if tzwin is not None: try: tz = tzwin(name) - except OSError: - pass + except WindowsError: + tz = None if not tz: from dateutil.zoneinfo import gettz tz = gettz(name) diff --git a/awx/lib/site-packages/dateutil/tzwin.py b/awx/lib/site-packages/dateutil/tzwin.py index 041c6cc3d6..e8a82d7587 100644 --- a/awx/lib/site-packages/dateutil/tzwin.py +++ b/awx/lib/site-packages/dateutil/tzwin.py @@ -1,8 +1,8 @@ # This code was originally contributed by Jeffrey Harris. import datetime import struct -import winreg +from six.moves import winreg __all__ = ["tzwin", "tzwinlocal"] @@ -12,8 +12,8 @@ TZKEYNAMENT = r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones" TZKEYNAME9X = r"SOFTWARE\Microsoft\Windows\CurrentVersion\Time Zones" TZLOCALKEYNAME = r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation" + def _settzkeyname(): - global TZKEYNAME handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) try: winreg.OpenKey(handle, TZKEYNAMENT).Close() @@ -21,8 +21,10 @@ def _settzkeyname(): except WindowsError: TZKEYNAME = TZKEYNAME9X handle.Close() + return TZKEYNAME + +TZKEYNAME = _settzkeyname() -_settzkeyname() class tzwinbase(datetime.tzinfo): """tzinfo class based on win32's timezones available in the registry.""" @@ -39,7 +41,7 @@ class tzwinbase(datetime.tzinfo): return datetime.timedelta(minutes=minutes) else: return datetime.timedelta(0) - + def tzname(self, dt): if self._isdst(dt): return self._dstname @@ -59,8 +61,11 @@ class tzwinbase(datetime.tzinfo): def display(self): return self._display - + def _isdst(self, dt): + if not self._dstmonth: + # dstmonth == 0 signals the zone has no daylight saving time + return False dston = picknthweekday(dt.year, self._dstmonth, self._dstdayofweek, self._dsthour, self._dstminute, self._dstweeknumber) @@ -78,31 +83,33 @@ class tzwin(tzwinbase): def __init__(self, name): self._name = name - handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) - tzkey = winreg.OpenKey(handle, "%s\%s" % (TZKEYNAME, name)) - keydict = valuestodict(tzkey) - tzkey.Close() - handle.Close() + # multiple contexts only possible in 2.7 and 3.1, we still support 2.6 + with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: + with winreg.OpenKey(handle, + "%s\%s" % (TZKEYNAME, name)) as tzkey: + keydict = valuestodict(tzkey) self._stdname = keydict["Std"].encode("iso-8859-1") self._dstname = keydict["Dlt"].encode("iso-8859-1") self._display = keydict["Display"] - + # See http://ww_winreg.jsiinc.com/SUBA/tip0300/rh0398.htm tup = struct.unpack("=3l16h", keydict["TZI"]) - self._stdoffset = -tup[0]-tup[1] # Bias + StandardBias * -1 - self._dstoffset = self._stdoffset-tup[2] # + DaylightBias * -1 - + 
self._stdoffset = -tup[0]-tup[1] # Bias + StandardBias * -1 + self._dstoffset = self._stdoffset-tup[2] # + DaylightBias * -1 + + # for the meaning see the win32 TIME_ZONE_INFORMATION structure docs + # http://msdn.microsoft.com/en-us/library/windows/desktop/ms725481(v=vs.85).aspx (self._stdmonth, - self._stddayofweek, # Sunday = 0 - self._stdweeknumber, # Last = 5 + self._stddayofweek, # Sunday = 0 + self._stdweeknumber, # Last = 5 self._stdhour, self._stdminute) = tup[4:9] (self._dstmonth, - self._dstdayofweek, # Sunday = 0 - self._dstweeknumber, # Last = 5 + self._dstdayofweek, # Sunday = 0 + self._dstweeknumber, # Last = 5 self._dsthour, self._dstminute) = tup[12:17] @@ -114,61 +121,59 @@ class tzwin(tzwinbase): class tzwinlocal(tzwinbase): - + def __init__(self): - handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) + with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: - tzlocalkey = winreg.OpenKey(handle, TZLOCALKEYNAME) - keydict = valuestodict(tzlocalkey) - tzlocalkey.Close() + with winreg.OpenKey(handle, TZLOCALKEYNAME) as tzlocalkey: + keydict = valuestodict(tzlocalkey) - self._stdname = keydict["StandardName"].encode("iso-8859-1") - self._dstname = keydict["DaylightName"].encode("iso-8859-1") + self._stdname = keydict["StandardName"].encode("iso-8859-1") + self._dstname = keydict["DaylightName"].encode("iso-8859-1") - try: - tzkey = winreg.OpenKey(handle, "%s\%s"%(TZKEYNAME, self._stdname)) - _keydict = valuestodict(tzkey) - self._display = _keydict["Display"] - tzkey.Close() - except OSError: - self._display = None + try: + with winreg.OpenKey( + handle, "%s\%s" % (TZKEYNAME, self._stdname)) as tzkey: + _keydict = valuestodict(tzkey) + self._display = _keydict["Display"] + except OSError: + self._display = None - handle.Close() - self._stdoffset = -keydict["Bias"]-keydict["StandardBias"] self._dstoffset = self._stdoffset-keydict["DaylightBias"] - # See http://ww_winreg.jsiinc.com/SUBA/tip0300/rh0398.htm tup = struct.unpack("=8h", keydict["StandardStart"]) (self._stdmonth, - self._stddayofweek, # Sunday = 0 - self._stdweeknumber, # Last = 5 + self._stddayofweek, # Sunday = 0 + self._stdweeknumber, # Last = 5 self._stdhour, self._stdminute) = tup[1:6] tup = struct.unpack("=8h", keydict["DaylightStart"]) (self._dstmonth, - self._dstdayofweek, # Sunday = 0 - self._dstweeknumber, # Last = 5 + self._dstdayofweek, # Sunday = 0 + self._dstweeknumber, # Last = 5 self._dsthour, self._dstminute) = tup[1:6] def __reduce__(self): return (self.__class__, ()) + def picknthweekday(year, month, dayofweek, hour, minute, whichweek): """dayofweek == 0 means Sunday, whichweek 5 means last instance""" first = datetime.datetime(year, month, 1, hour, minute) - weekdayone = first.replace(day=((dayofweek-first.isoweekday())%7+1)) + weekdayone = first.replace(day=((dayofweek-first.isoweekday()) % 7+1)) for n in range(whichweek): dt = weekdayone+(whichweek-n)*ONEWEEK if dt.month == month: return dt + def valuestodict(key): """Convert a registry key's values to a dictionary.""" dict = {} diff --git a/awx/lib/site-packages/dateutil/zoneinfo/__init__.py b/awx/lib/site-packages/dateutil/zoneinfo/__init__.py index 81db1405b1..53d9dfee0c 100644 --- a/awx/lib/site-packages/dateutil/zoneinfo/__init__.py +++ b/awx/lib/site-packages/dateutil/zoneinfo/__init__.py @@ -1,109 +1,108 @@ # -*- coding: utf-8 -*- -""" -Copyright (c) 2003-2005 Gustavo Niemeyer <gustavo@niemeyer.net> - -This module offers extensions to the standard Python -datetime module. 
-""" import logging import os -from subprocess import call +import warnings +import tempfile +import shutil +from subprocess import check_call from tarfile import TarFile +from pkgutil import get_data +from io import BytesIO +from contextlib import closing from dateutil.tz import tzfile -__author__ = "Tomi Pieviläinen <tomi.pievilainen@iki.fi>" -__license__ = "Simplified BSD" - __all__ = ["setcachesize", "gettz", "rebuild"] -CACHE = [] -CACHESIZE = 10 +_ZONEFILENAME = "dateutil-zoneinfo.tar.gz" + +# python2.6 compatability. Note that TarFile.__exit__ != TarFile.close, but +# it's close enough for python2.6 +_tar_open = TarFile.open +if not hasattr(TarFile, '__exit__'): + def _tar_open(*args, **kwargs): + return closing(TarFile.open(*args, **kwargs)) + class tzfile(tzfile): def __reduce__(self): return (gettz, (self._filename,)) -def getzoneinfofile(): - filenames = sorted(os.listdir(os.path.join(os.path.dirname(__file__)))) - filenames.reverse() - for entry in filenames: - if entry.startswith("zoneinfo") and ".tar." in entry: - return os.path.join(os.path.dirname(__file__), entry) - return None -ZONEINFOFILE = getzoneinfofile() +def getzoneinfofile_stream(): + try: + return BytesIO(get_data(__name__, _ZONEFILENAME)) + except IOError as e: # TODO switch to FileNotFoundError? + warnings.warn("I/O error({0}): {1}".format(e.errno, e.strerror)) + return None -del getzoneinfofile -def setcachesize(size): - global CACHESIZE, CACHE - CACHESIZE = size - del CACHE[size:] +class ZoneInfoFile(object): + def __init__(self, zonefile_stream=None): + if zonefile_stream is not None: + with _tar_open(fileobj=zonefile_stream, mode='r') as tf: + # dict comprehension does not work on python2.6 + # TODO: get back to the nicer syntax when we ditch python2.6 + # self.zones = {zf.name: tzfile(tf.extractfile(zf), + # filename = zf.name) + # for zf in tf.getmembers() if zf.isfile()} + self.zones = dict((zf.name, tzfile(tf.extractfile(zf), + filename=zf.name)) + for zf in tf.getmembers() if zf.isfile()) + # deal with links: They'll point to their parent object. Less + # waste of memory + # links = {zl.name: self.zones[zl.linkname] + # for zl in tf.getmembers() if zl.islnk() or zl.issym()} + links = dict((zl.name, self.zones[zl.linkname]) + for zl in tf.getmembers() if + zl.islnk() or zl.issym()) + self.zones.update(links) + else: + self.zones = dict() + + +# The current API has gettz as a module function, although in fact it taps into +# a stateful class. So as a workaround for now, without changing the API, we +# will create a new "global" class instance the first time a user requests a +# timezone. Ugly, but adheres to the api. +# +# TODO: deprecate this. +_CLASS_ZONE_INSTANCE = list() + def gettz(name): - tzinfo = None - if ZONEINFOFILE: - for cachedname, tzinfo in CACHE: - if cachedname == name: - break - else: - tf = TarFile.open(ZONEINFOFILE) - try: - zonefile = tf.extractfile(name) - except KeyError: - tzinfo = None - else: - tzinfo = tzfile(zonefile) - tf.close() - CACHE.insert(0, (name, tzinfo)) - del CACHE[CACHESIZE:] - return tzinfo + if len(_CLASS_ZONE_INSTANCE) == 0: + _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream())) + return _CLASS_ZONE_INSTANCE[0].zones.get(name) -def rebuild(filename, tag=None, format="gz"): + +def rebuild(filename, tag=None, format="gz", zonegroups=[]): """Rebuild the internal timezone info in dateutil/zoneinfo/zoneinfo*tar* filename is the timezone tarball from ftp.iana.org/tz. 
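# Aside: a stripped-down sketch of the build-once singleton the new gettz()
# above relies on; `_load_everything` is a made-up stand-in for
# ZoneInfoFile(getzoneinfofile_stream()).
def _load_everything():
    return {"UTC": "<tzinfo for UTC>"}   # placeholder payload

_INSTANCE = []                           # empty until the first lookup

def get_thing(name):
    if len(_INSTANCE) == 0:
        _INSTANCE.append(_load_everything())   # pay the load cost once
    return _INSTANCE[0].get(name)              # afterwards: plain dict lookup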
""" - import tempfile, shutil tmpdir = tempfile.mkdtemp() zonedir = os.path.join(tmpdir, "zoneinfo") moduledir = os.path.dirname(__file__) - if tag: tag = "-"+tag - targetname = "zoneinfo%s.tar.%s" % (tag, format) try: - tf = TarFile.open(filename) - # The "backwards" zone file contains links to other files, so must be - # processed as last - for name in sorted(tf.getnames(), - key=lambda k: k != "backward" and k or "z"): - if not (name.endswith(".sh") or - name.endswith(".tab") or - name == "leapseconds"): + with _tar_open(filename) as tf: + for name in zonegroups: tf.extract(name, tmpdir) - filepath = os.path.join(tmpdir, name) - try: - # zic will return errors for nontz files in the package - # such as the Makefile or README, so check_call cannot - # be used (or at least extra checks would be needed) - call(["zic", "-d", zonedir, filepath]) - except OSError as e: - if e.errno == 2: - logging.error( - "Could not find zic. Perhaps you need to install " - "libc-bin or some other package that provides it, " - "or it's not in your PATH?") + filepaths = [os.path.join(tmpdir, n) for n in zonegroups] + try: + check_call(["zic", "-d", zonedir] + filepaths) + except OSError as e: + if e.errno == 2: + logging.error( + "Could not find zic. Perhaps you need to install " + "libc-bin or some other package that provides it, " + "or it's not in your PATH?") raise - tf.close() - target = os.path.join(moduledir, targetname) - for entry in os.listdir(moduledir): - if entry.startswith("zoneinfo") and ".tar." in entry: - os.unlink(os.path.join(moduledir, entry)) - tf = TarFile.open(target, "w:%s" % format) - for entry in os.listdir(zonedir): - entrypath = os.path.join(zonedir, entry) - tf.add(entrypath, entry) - tf.close() + target = os.path.join(moduledir, _ZONEFILENAME) + with _tar_open(target, "w:%s" % format) as tf: + for entry in os.listdir(zonedir): + entrypath = os.path.join(zonedir, entry) + tf.add(entrypath, entry) finally: shutil.rmtree(tmpdir) diff --git a/awx/lib/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz b/awx/lib/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz new file mode 100644 index 0000000000..4c95eda3e4 Binary files /dev/null and b/awx/lib/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz differ diff --git a/awx/lib/site-packages/dateutil/zoneinfo/zoneinfo--latest.tar.gz b/awx/lib/site-packages/dateutil/zoneinfo/zoneinfo--latest.tar.gz deleted file mode 100644 index 039d06f979..0000000000 Binary files a/awx/lib/site-packages/dateutil/zoneinfo/zoneinfo--latest.tar.gz and /dev/null differ diff --git a/awx/lib/site-packages/keyring/backend.py b/awx/lib/site-packages/keyring/backend.py index 2563829039..1161774eae 100644 --- a/awx/lib/site-packages/keyring/backend.py +++ b/awx/lib/site-packages/keyring/backend.py @@ -5,17 +5,27 @@ Keyring implementation support from __future__ import absolute_import import abc +import logging try: import importlib except ImportError: pass +try: + import pkg_resources +except ImportError: + pass + from . import errors, util from . 
import backends from .util import properties from .py27compat import add_metaclass, filter + +log = logging.getLogger(__name__) + + class KeyringBackendMeta(abc.ABCMeta): """ A metaclass that's both an ABCMeta and a type that keeps a registry of @@ -127,6 +137,38 @@ def _load_backends(): backends = ('file', 'Gnome', 'Google', 'keyczar', 'kwallet', 'multi', 'OS_X', 'pyfs', 'SecretService', 'Windows') list(map(_load_backend, backends)) + _load_plugins() + +def _load_plugins(): + """ + Locate all setuptools entry points by the name 'keyring backends' + and initialize them. + Any third-party library may register an entry point by adding the + following to their setup.py:: + + entry_points = { + 'keyring backends': [ + 'plugin_name = mylib.mymodule:initialize_func', + ], + }, + + `plugin_name` can be anything, and is only used to display the name + of the plugin at initialization time. + + `initialize_func` is optional, but will be invoked if callable. + """ + if 'pkg_resources' not in globals(): + return + group = 'keyring backends' + entry_points = pkg_resources.iter_entry_points(group=group) + for ep in entry_points: + try: + log.info('Loading %s', ep.name) + init_func = ep.load() + if callable(init_func): + init_func() + except Exception: + log.exception("Error initializing plugin %s." % ep) @util.once def get_all_keyring(): diff --git a/awx/lib/site-packages/markdown/__init__.py b/awx/lib/site-packages/markdown/__init__.py index 4943388ed0..3ea8e05ec7 100644 --- a/awx/lib/site-packages/markdown/__init__.py +++ b/awx/lib/site-packages/markdown/__init__.py @@ -10,7 +10,7 @@ called from the command line. import markdown html = markdown.markdown(your_text_string) -See <http://packages.python.org/Markdown/> for more +See <https://pythonhosted.org/Markdown/> for more information and instructions on how to extend the functionality of Python Markdown. Read that before you try modifying this file. @@ -36,6 +36,8 @@ from .__version__ import version, version_info import codecs import sys import logging +import warnings +import importlib from . import util from .preprocessors import build_preprocessors from .blockprocessors import build_block_parser @@ -48,6 +50,7 @@ from .serializers import to_html_string, to_xhtml_string __all__ = ['Markdown', 'markdown', 'markdownFromFile'] logger = logging.getLogger('MARKDOWN') +logging.captureWarnings(True) class Markdown(object): @@ -96,8 +99,8 @@ class Markdown(object): Note that it is suggested that the more specific formats ("xhtml1" and "html4") be used as "xhtml" or "html" may change in the future if it makes sense at that time. - * safe_mode: Disallow raw html. One of "remove", "replace" or "escape". - * html_replacement_text: Text used when safe_mode is set to "replace". + * safe_mode: Deprecated! Disallow raw html. One of "remove", "replace" or "escape". + * html_replacement_text: Deprecated! Text used when safe_mode is set to "replace". * tab_length: Length of tabs in the source. Default: 4 * enable_attributes: Enable the conversion of attributes. 
Default: True
        * smart_emphasis: Treat `_connected_words_` intelligently Default: True
@@ -107,14 +110,16 @@
         # For backward compatibility, loop through old positional args
         pos = ['extensions', 'extension_configs', 'safe_mode', 'output_format']
-        c = 0
-        for arg in args:
+        for c, arg in enumerate(args):
             if pos[c] not in kwargs:
                 kwargs[pos[c]] = arg
-            c += 1
-            if c == len(pos):
+            if c+1 == len(pos): #pragma: no cover
                 # ignore any additional args
                 break
+        if len(args):
+            warnings.warn('Positional arguments are pending deprecation in Markdown '
+                          'and will be deprecated in version 2.6. Use keyword '
+                          'arguments only.', PendingDeprecationWarning)

         # Loop through kwargs and assign defaults
         for option, default in self.option_defaults.items():
@@ -125,6 +130,18 @@ class Markdown(object):
             # Disable attributes in safeMode when not explicitly set
             self.enable_attributes = False

+        if 'safe_mode' in kwargs:
+            warnings.warn('"safe_mode" is pending deprecation in Python-Markdown '
+                          'and will be deprecated in version 2.6. Use an HTML '
+                          'sanitizer (like Bleach http://bleach.readthedocs.org/) '
+                          'if you are parsing untrusted markdown text. See the '
+                          '2.5 release notes for more info', PendingDeprecationWarning)
+
+        if 'html_replacement_text' in kwargs:
+            warnings.warn('The "html_replacement_text" keyword is pending deprecation '
+                          'in Python-Markdown and will be deprecated in version 2.6 '
+                          'along with "safe_mode".', PendingDeprecationWarning)
+
         self.registeredExtensions = []
         self.docType = ""
         self.stripTopLevelTags = True
@@ -160,9 +177,11 @@ class Markdown(object):
         """
         for ext in extensions:
             if isinstance(ext, util.string_type):
-                ext = self.build_extension(ext, configs.get(ext, []))
+                ext = self.build_extension(ext, configs.get(ext, {}))
             if isinstance(ext, Extension):
                 ext.extendMarkdown(self, globals())
+                logger.debug('Successfully loaded extension "%s.%s".'
+                             % (ext.__class__.__module__, ext.__class__.__name__))
             elif ext is not None:
                 raise TypeError(
                     'Extension "%s.%s" must be of type: "markdown.Extension"'
@@ -170,52 +189,87 @@ class Markdown(object):
         return self

-    def build_extension(self, ext_name, configs = []):
+    def build_extension(self, ext_name, configs):
         """Build extension by name, then return the module.

         The extension name may contain arguments as part of the string in the
         following format: "extname(key1=value1,key2=value2)"

         """
-
-        # Parse extensions config params (ignore the order)
+        configs = dict(configs)
+
+        # Parse extensions config params (ignore the order)
         pos = ext_name.find("(") # find the first "("
         if pos > 0:
             ext_args = ext_name[pos+1:-1]
             ext_name = ext_name[:pos]
             pairs = [x.split("=") for x in ext_args.split(",")]
             configs.update([(x.strip(), y.strip()) for (x, y) in pairs])
+            warnings.warn('Setting configs in the Named Extension string is pending deprecation. '
+                          'It is recommended that you pass an instance of the extension class to '
+                          'Markdown or use the "extension_configs" keyword. The current behavior '
+                          'will be deprecated in version 2.6 and raise an error in version 2.7. '
+                          'See the Release Notes for Python-Markdown version 2.5 for more info.',
+                          PendingDeprecationWarning)

-        # Setup the module name
-        module_name = ext_name
-        if '.' not in ext_name:
-            module_name = '.'.join(['markdown.extensions', ext_name])
+        # Get class name (if provided): `path.to.module:ClassName`
+        ext_name, class_name = ext_name.split(':', 1) if ':' in ext_name else (ext_name, '')

         # Try loading the extension first from one place, then another
-        try: # New style (markdown.extensions.<extension>)
-            module = __import__(module_name, {}, {}, [module_name.rpartition('.')[0]])
+        try:
+            # Assume string uses dot syntax (`path.to.some.module`)
+            module = importlib.import_module(ext_name)
+            logger.debug('Successfully imported extension module "%s".' % ext_name)
+            # For backward compat (until deprecation) check that this is an extension
+            if '.' not in ext_name and not (hasattr(module, 'extendMarkdown') or (class_name and hasattr(module, class_name))):
+                # We have a name conflict (eg: extensions=['tables'] and PyTables is installed)
+                raise ImportError
         except ImportError:
-            module_name_old_style = '_'.join(['mdx', ext_name])
-            try: # Old style (mdx_<extension>)
-                module = __import__(module_name_old_style)
-            except ImportError as e:
-                message = "Failed loading extension '%s' from '%s' or '%s'" \
-                    % (ext_name, module_name, module_name_old_style)
+            # Prepend `markdown.extensions.` to name
+            module_name = '.'.join(['markdown.extensions', ext_name])
+            try:
+                module = importlib.import_module(module_name)
+                logger.debug('Successfully imported extension module "%s".' % module_name)
+                warnings.warn('Using short names for Markdown\'s builtin extensions is pending deprecation. '
+                              'Use the full path to the extension with Python\'s dot notation '
+                              '(eg: "%s" instead of "%s"). The current behavior will be deprecated in '
+                              'version 2.6 and raise an error in version 2.7. See the Release Notes for '
+                              'Python-Markdown version 2.5 for more info.' % (module_name, ext_name),
+                              PendingDeprecationWarning)
+            except ImportError:
+                # Prepend `mdx_` to name
+                module_name_old_style = '_'.join(['mdx', ext_name])
+                try:
+                    module = importlib.import_module(module_name_old_style)
+                    logger.debug('Successfully imported extension module "%s".' % module_name_old_style)
+                    warnings.warn('Markdown\'s behavior of prepending "mdx_" to an extension name '
+                                  'is pending deprecation. Use the full path to the extension with '
+                                  'Python\'s dot notation (eg: "%s" instead of "%s"). The '
+                                  'current behavior will be deprecated in version 2.6 and raise an '
+                                  'error in version 2.7. See the Release Notes for Python-Markdown '
+                                  'version 2.5 for more info.' % (module_name_old_style, ext_name),
+                                  PendingDeprecationWarning)
+                except ImportError as e:
+                    message = "Failed loading extension '%s' from '%s', '%s' or '%s'" \
+                        % (ext_name, ext_name, module_name, module_name_old_style)
+                    e.args = (message,) + e.args[1:]
+                    raise
+
+        if class_name:
+            # Load given class name from module.
+            return getattr(module, class_name)(**configs)
+        else:
+            # Expect makeExtension() function to return a class.
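# Aside: a hedged usage sketch (not part of the diff) of the loading rules
# build_extension now implements -- a dotted module path triggers
# makeExtension(**configs), "module:ClassName" instantiates the named class
# directly, and per-extension settings travel in extension_configs keyed by
# the same string (the same dict shape the new -c CONFIG_FILE option below
# must produce).
import markdown

html = markdown.markdown(
    '    print("hello")',
    extensions=[
        'markdown.extensions.codehilite',   # dotted path; bare 'codehilite' is pending deprecation
        # 'mypkg.myext:MyExtension',        # hypothetical third-party module:ClassName form
    ],
    extension_configs={
        'markdown.extensions.codehilite': {'css_class': 'highlight',
                                           'linenums': True},
    },
)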
+            try:
+                return module.makeExtension(**configs)
+            except AttributeError as e:
+                message = e.args[0]
+                message = "Failed to initiate extension " \
+                          "'%s': %s" % (ext_name, message)
+                e.args = (message,) + e.args[1:]
+                raise

-        # If the module is loaded successfully, we expect it to define a
-        # function called makeExtension()
-        try:
-            return module.makeExtension(configs.items())
-        except AttributeError as e:
-            message = e.args[0]
-            message = "Failed to initiate extension " \
-                      "'%s': %s" % (ext_name, message)
-            e.args = (message,) + e.args[1:]
-            raise
-
     def registerExtension(self, extension):
         """ This gets called by the extension """
         self.registeredExtensions.append(extension)
@@ -303,7 +357,7 @@ class Markdown(object):
             start = output.index('<%s>'%self.doc_tag)+len(self.doc_tag)+2
             end = output.rindex('</%s>'%self.doc_tag)
             output = output[start:end].strip()
-        except ValueError:
+        except ValueError: #pragma: no cover
             if output.strip().endswith('<%s />'%self.doc_tag):
                 # We have an empty document
                 output = ''
@@ -434,6 +488,10 @@ def markdownFromFile(*args, **kwargs):
         c += 1
         if c == len(pos):
             break
+    if len(args):
+        warnings.warn('Positional arguments are pending deprecation in Markdown '
+                      'and will be deprecated in version 2.6. Use keyword '
+                      'arguments only.', PendingDeprecationWarning)

     md = Markdown(**kwargs)
     md.convertFile(kwargs.get('input', None),
diff --git a/awx/lib/site-packages/markdown/__main__.py b/awx/lib/site-packages/markdown/__main__.py
index 8ee8c8222e..d085540bce 100644
--- a/awx/lib/site-packages/markdown/__main__.py
+++ b/awx/lib/site-packages/markdown/__main__.py
@@ -7,20 +7,25 @@ COMMAND-LINE SPECIFIC STUFF
 import markdown
 import sys
 import optparse
+import codecs
+try:
+    import yaml
+except ImportError: #pragma: no cover
+    import json as yaml

 import logging
 from logging import DEBUG, INFO, CRITICAL

 logger = logging.getLogger('MARKDOWN')

-def parse_options():
+def parse_options(args=None, values=None):
     """
     Define and parse `optparse` options for command-line usage.
     """
     usage = """%prog [options] [INPUTFILE]
            (STDIN is assumed if no INPUTFILE is given)"""
     desc = "A Python implementation of John Gruber's Markdown. " \
-           "http://packages.python.org/Markdown/"
+           "https://pythonhosted.org/Markdown/"
     ver = "%%prog %s" % markdown.version

     parser = optparse.OptionParser(usage=usage, description=desc, version=ver)
@@ -29,28 +34,36 @@ def parse_options():
                       metavar="OUTPUT_FILE")
     parser.add_option("-e", "--encoding", dest="encoding",
                       help="Encoding for input and output files.",)
+    parser.add_option("-s", "--safe", dest="safe", default=False,
+                      metavar="SAFE_MODE",
+                      help="Deprecated! 'replace', 'remove' or 'escape' HTML tags in input")
+    parser.add_option("-o", "--output_format", dest="output_format",
+                      default='xhtml1', metavar="OUTPUT_FORMAT",
+                      help="'xhtml1' (default), 'html4' or 'html5'.")
+    parser.add_option("-n", "--no_lazy_ol", dest="lazy_ol",
+                      action='store_false', default=True,
+                      help="Observe number of first item of ordered lists.")
+    parser.add_option("-x", "--extension", action="append", dest="extensions",
+                      help = "Load extension EXTENSION.", metavar="EXTENSION")
+    parser.add_option("-c", "--extension_configs", dest="configfile", default=None,
+                      help="Read extension configurations from CONFIG_FILE. "
+                           "CONFIG_FILE must be of JSON or YAML format. YAML format requires "
+                           "that a python YAML library be installed. The parsed JSON or YAML "
+                           "must result in a python dictionary which would be accepted by the "
+                           "'extension_configs' keyword on the markdown.Markdown class.
" + "The extensions must also be loaded with the `--extension` option.", + metavar="CONFIG_FILE") parser.add_option("-q", "--quiet", default = CRITICAL, action="store_const", const=CRITICAL+10, dest="verbose", help="Suppress all warnings.") parser.add_option("-v", "--verbose", action="store_const", const=INFO, dest="verbose", help="Print all warnings.") - parser.add_option("-s", "--safe", dest="safe", default=False, - metavar="SAFE_MODE", - help="'replace', 'remove' or 'escape' HTML tags in input") - parser.add_option("-o", "--output_format", dest="output_format", - default='xhtml1', metavar="OUTPUT_FORMAT", - help="'xhtml1' (default), 'html4' or 'html5'.") parser.add_option("--noisy", action="store_const", const=DEBUG, dest="verbose", help="Print debug messages.") - parser.add_option("-x", "--extension", action="append", dest="extensions", - help = "Load extension EXTENSION.", metavar="EXTENSION") - parser.add_option("-n", "--no_lazy_ol", dest="lazy_ol", - action='store_false', default=True, - help="Observe number of first item of ordered lists.") - (options, args) = parser.parse_args() + (options, args) = parser.parse_args(args, values) if len(args) == 0: input_file = None @@ -60,15 +73,26 @@ def parse_options(): if not options.extensions: options.extensions = [] + extension_configs = {} + if options.configfile: + with codecs.open(options.configfile, mode="r", encoding=options.encoding) as fp: + try: + extension_configs = yaml.load(fp) + except Exception as e: + message = "Failed parsing extension config file: %s" % options.configfile + e.args = (message,) + e.args[1:] + raise + return {'input': input_file, 'output': options.filename, 'safe_mode': options.safe, 'extensions': options.extensions, + 'extension_configs': extension_configs, 'encoding': options.encoding, 'output_format': options.output_format, 'lazy_ol': options.lazy_ol}, options.verbose -def run(): +def run(): #pragma: no cover """Run Markdown from the command line.""" # Parse options and adjust logging level if necessary @@ -80,7 +104,7 @@ def run(): # Run markdown.markdownFromFile(**options) -if __name__ == '__main__': +if __name__ == '__main__': #pragma: no cover # Support running module as a commandline command. # Python 2.5 & 2.6 do: `python -m markdown.__main__ [options] [args]`. # Python 2.7 & 3.x do: `python -m markdown [options] [args]`. diff --git a/awx/lib/site-packages/markdown/__version__.py b/awx/lib/site-packages/markdown/__version__.py index 2132ce6631..397dfb3491 100644 --- a/awx/lib/site-packages/markdown/__version__.py +++ b/awx/lib/site-packages/markdown/__version__.py @@ -5,7 +5,7 @@ # (major, minor, micro, alpha/beta/rc/final, #) # (1, 1, 2, 'alpha', 0) => "1.1.2.dev" # (1, 2, 0, 'beta', 2) => "1.2b2" -version_info = (2, 4, 1, 'final', 0) +version_info = (2, 5, 2, 'final', 0) def _get_version(): " Returns a PEP 386-compliant version number from version_info. " diff --git a/awx/lib/site-packages/markdown/blockprocessors.py b/awx/lib/site-packages/markdown/blockprocessors.py index 147ff0ff9a..08fbcf86b3 100644 --- a/awx/lib/site-packages/markdown/blockprocessors.py +++ b/awx/lib/site-packages/markdown/blockprocessors.py @@ -99,7 +99,7 @@ class BlockProcessor: * ``block``: A block of text from the source which has been split at blank lines. """ - pass + pass #pragma: no cover def run(self, parent, blocks): """ Run processor. Must be overridden by subclasses. @@ -123,7 +123,7 @@ class BlockProcessor: * ``parent``: A etree element which is the parent of the current block. 
* ``blocks``: A list of all remaining blocks of the document. """ - pass + pass #pragma: no cover class ListIndentProcessor(BlockProcessor): @@ -433,7 +433,7 @@ class HashHeaderProcessor(BlockProcessor): if after: # Insert remaining lines as first block for future parsing. blocks.insert(0, after) - else: + else: #pragma: no cover # This should never happen, but just in case... logger.warn("We've got a problem header: %r" % block) diff --git a/awx/lib/site-packages/markdown/extensions/__init__.py b/awx/lib/site-packages/markdown/extensions/__init__.py index 184c4d1b1d..03b2a4cf32 100644 --- a/awx/lib/site-packages/markdown/extensions/__init__.py +++ b/awx/lib/site-packages/markdown/extensions/__init__.py @@ -4,17 +4,45 @@ Extensions """ from __future__ import unicode_literals +from ..util import parseBoolValue +import warnings class Extension(object): """ Base class for extensions to subclass. """ - def __init__(self, configs = {}): - """Create an instance of an Extention. + + # Default config -- to be overriden by a subclass + # Must be of the following format: + # { + # 'key': ['value', 'description'] + # } + # Note that Extension.setConfig will raise a KeyError + # if a default is not set here. + config = {} + + def __init__(self, *args, **kwargs): + """ Initiate Extension and set up configs. """ - Keyword arguments: - - * configs: A dict of configuration setting used by an Extension. - """ - self.config = configs + # check for configs arg for backward compat. + # (there only ever used to be one so we use arg[0]) + if len(args): + self.setConfigs(args[0]) + warnings.warn('Extension classes accepting positional args is pending Deprecation. ' + 'Each setting should be passed into the Class as a keyword. Positional ' + 'args will be deprecated in version 2.6 and raise an error in version ' + '2.7. See the Release Notes for Python-Markdown version 2.5 for more info.', + PendingDeprecationWarning) + # check for configs kwarg for backward compat. + if 'configs' in kwargs.keys(): + self.setConfigs(kwargs.pop('configs', {})) + warnings.warn('Extension classes accepting a dict on the single keyword "config" is ' + 'pending Deprecation. Each setting should be passed into the Class as ' + 'a keyword directly. The "config" keyword will be deprecated in version ' + '2.6 and raise an error in version 2.7. See the Release Notes for ' + 'Python-Markdown version 2.5 for more info.', + PendingDeprecationWarning) + # finally, use kwargs + self.setConfigs(kwargs) + def getConfig(self, key, default=''): """ Return a setting for the given key or an empty string. """ @@ -33,8 +61,20 @@ class Extension(object): def setConfig(self, key, value): """ Set a config setting for `key` with the given `value`. """ + if isinstance(self.config[key][0], bool): + value = parseBoolValue(value) + if self.config[key][0] is None: + value = parseBoolValue(value, preserve_none=True) self.config[key][0] = value + def setConfigs(self, items): + """ Set multiple config settings given a dict or list of tuples. """ + if hasattr(items, 'items'): + # it's a dict + items = items.items() + for key, value in items: + self.setConfig(key, value) + def extendMarkdown(self, md, md_globals): """ Add the various proccesors and patterns to the Markdown Instance. 
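# Aside: a hedged sketch (hypothetical extension, not from the diff) of the
# new-style config handling above -- defaults live in a class-level `config`
# dict, keyword arguments override them, and setConfig() coerces strings to
# booleans whenever the declared default is a bool.
from markdown.extensions import Extension

class MyExtension(Extension):
    config = {
        'strict': [False, 'Fail on unknown input'],
        'prefix': ['my-', 'Prefix applied to generated ids'],
    }
    def extendMarkdown(self, md, md_globals):
        pass   # patterns/processors would be registered here

ext = MyExtension(strict='True')        # 'True' -> True via parseBoolValue
assert ext.getConfig('strict') is True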
diff --git a/awx/lib/site-packages/markdown/extensions/abbr.py b/awx/lib/site-packages/markdown/extensions/abbr.py
index 3f8a443585..58dd0aaad1 100644
--- a/awx/lib/site-packages/markdown/extensions/abbr.py
+++ b/awx/lib/site-packages/markdown/extensions/abbr.py
@@ -4,22 +4,15 @@ Abbreviation Extension for Python-Markdown

 This extension adds abbreviation handling to Python-Markdown.

-Simple Usage:
+See <https://pythonhosted.org/Markdown/extensions/abbreviations.html>
+for documentation.

-    >>> import markdown
-    >>> text = """
-    ... Some text with an ABBR and a REF. Ignore REFERENCE and ref.
-    ...
-    ... *[ABBR]: Abbreviation
-    ... *[REF]: Abbreviation Reference
-    ... """
-    >>> print markdown.markdown(text, ['abbr'])
-    <p>Some text with an <abbr title="Abbreviation">ABBR</abbr> and a <abbr title="Abbreviation Reference">REF</abbr>. Ignore REFERENCE and ref.</p>
+Original code Copyright 2007-2008 [Waylan Limberg](http://achinghead.com/) and
+ [Seemant Kulleen](http://www.kulleen.org/)

-Copyright 2007-2008
-* [Waylan Limberg](http://achinghead.com/)
-* [Seemant Kulleen](http://www.kulleen.org/)
-
+All changes Copyright 2008-2014 The Python Markdown Project
+
+License: [BSD](http://www.opensource.org/licenses/bsd-license.php)

 '''

@@ -92,5 +85,5 @@ class AbbrPattern(Pattern):
         abbr.set('title', self.title)
         return abbr

-def makeExtension(configs=None):
-    return AbbrExtension(configs=configs)
+def makeExtension(*args, **kwargs):
+    return AbbrExtension(*args, **kwargs)
diff --git a/awx/lib/site-packages/markdown/extensions/admonition.py b/awx/lib/site-packages/markdown/extensions/admonition.py
index 9a45b9249c..189f2c2dd9 100644
--- a/awx/lib/site-packages/markdown/extensions/admonition.py
+++ b/awx/lib/site-packages/markdown/extensions/admonition.py
@@ -4,39 +4,16 @@ Admonition extension for Python-Markdown

 Adds rST-style admonitions. Inspired by [rST][] feature with the same name.

-The syntax is (followed by an indented block with the contents):
-    !!! [type] [optional explicit title]
-
-Where `type` is used as a CSS class name of the div. If not present, `title`
-defaults to the capitalized `type`, so "note" -> "Note".
-
-rST suggests the following `types`, but you're free to use whatever you want:
-    attention, caution, danger, error, hint, important, note, tip, warning
-
-
-A simple example:
-    !!! note
-        This is the first line inside the box.
-
-Outputs:
-    <div class="admonition note">
-    <p class="admonition-title">Note</p>
-    <p>This is the first line inside the box</p>
-    </div>
-
-You can also specify the title and CSS class of the admonition:
-    !!! custom "Did you know?"
-        Another line here.
-
-Outputs:
-    <div class="admonition custom">
-    <p class="admonition-title">Did you know?</p>
-    <p>Another line here.</p>
-    </div>
-
 [rST]: http://docutils.sourceforge.net/docs/ref/rst/directives.html#specific-admonitions

-By [Tiago Serafim](http://www.tiagoserafim.com/).
+See <https://pythonhosted.org/Markdown/extensions/admonition.html>
+for documentation.
+
+Original code Copyright [Tiago Serafim](http://www.tiagoserafim.com/).
+ +All changes Copyright The Python Markdown Project + +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) """ @@ -114,5 +91,6 @@ class AdmonitionProcessor(BlockProcessor): return klass, title -def makeExtension(configs={}): - return AdmonitionExtension(configs=configs) +def makeExtension(*args, **kwargs): + return AdmonitionExtension(*args, **kwargs) + diff --git a/awx/lib/site-packages/markdown/extensions/attr_list.py b/awx/lib/site-packages/markdown/extensions/attr_list.py index 8b65f5661d..59da3b42de 100644 --- a/awx/lib/site-packages/markdown/extensions/attr_list.py +++ b/awx/lib/site-packages/markdown/extensions/attr_list.py @@ -6,15 +6,14 @@ Adds attribute list syntax. Inspired by [maruku](http://maruku.rubyforge.org/proposal.html#attribute_lists)'s feature of the same name. -Copyright 2011 [Waylan Limberg](http://achinghead.com/). +See <https://pythonhosted.org/Markdown/extensions/attr_list.html> +for documentation. -Contact: markdown@freewisdom.org +Original code Copyright 2011 [Waylan Limberg](http://achinghead.com/). -License: BSD (see ../LICENSE.md for details) +All changes Copyright 2011-2014 The Python Markdown Project -Dependencies: -* [Python 2.4+](http://python.org) -* [Markdown 2.1+](http://packages.python.org/Markdown/) +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) """ @@ -27,7 +26,7 @@ import re try: Scanner = re.Scanner -except AttributeError: +except AttributeError: #pragma: no cover # must be on Python 2.4 from sre import Scanner @@ -164,5 +163,5 @@ class AttrListExtension(Extension): md.treeprocessors.add('attr_list', AttrListTreeprocessor(md), '>prettify') -def makeExtension(configs={}): - return AttrListExtension(configs=configs) +def makeExtension(*args, **kwargs): + return AttrListExtension(*args, **kwargs) diff --git a/awx/lib/site-packages/markdown/extensions/codehilite.py b/awx/lib/site-packages/markdown/extensions/codehilite.py index 428bd0cb2b..0c3df7e92f 100644 --- a/awx/lib/site-packages/markdown/extensions/codehilite.py +++ b/awx/lib/site-packages/markdown/extensions/codehilite.py @@ -4,17 +4,14 @@ CodeHilite Extension for Python-Markdown Adds code/syntax highlighting to standard Python-Markdown code blocks. -Copyright 2006-2008 [Waylan Limberg](http://achinghead.com/). +See <https://pythonhosted.org/Markdown/extensions/code_hilite.html> +for documentation. -Project website: <http://packages.python.org/Markdown/extensions/code_hilite.html> -Contact: markdown@freewisdom.org +Original code Copyright 2006-2008 [Waylan Limberg](http://achinghead.com/). 
-License: BSD (see ../LICENSE.md for details) +All changes Copyright 2008-2014 The Python Markdown Project -Dependencies: -* [Python 2.3+](http://python.org/) -* [Markdown 2.0+](http://packages.python.org/Markdown/) -* [Pygments](http://pygments.org/) +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) """ @@ -25,8 +22,8 @@ from ..treeprocessors import Treeprocessor import warnings try: from pygments import highlight - from pygments.lexers import get_lexer_by_name, guess_lexer, TextLexer - from pygments.formatters import HtmlFormatter + from pygments.lexers import get_lexer_by_name, guess_lexer + from pygments.formatters import get_formatter_by_name pygments = True except ImportError: pygments = False @@ -112,14 +109,15 @@ class CodeHilite(object): if self.guess_lang: lexer = guess_lexer(self.src) else: - lexer = TextLexer() + lexer = get_lexer_by_name('text') except ValueError: - lexer = TextLexer() - formatter = HtmlFormatter(linenos=self.linenums, - cssclass=self.css_class, - style=self.style, - noclasses=self.noclasses, - hl_lines=self.hl_lines) + lexer = get_lexer_by_name('text') + formatter = get_formatter_by_name('html', + linenos=self.linenums, + cssclass=self.css_class, + style=self.style, + noclasses=self.noclasses, + hl_lines=self.hl_lines) return highlight(self.src, lexer, formatter) else: # just escape and build markup usable by JS highlighting libs @@ -225,7 +223,7 @@ class HiliteTreeprocessor(Treeprocessor): class CodeHiliteExtension(Extension): """ Add source code hilighting to markdown codeblocks. """ - def __init__(self, configs): + def __init__(self, *args, **kwargs): # define default configs self.config = { 'linenums': [None, "Use lines numbers. True=yes, False=no, None=auto"], @@ -237,22 +235,7 @@ class CodeHiliteExtension(Extension): 'noclasses': [False, 'Use inline styles instead of CSS classes - Default false'] } - # Override defaults with user settings - for key, value in configs: - # convert strings to booleans - if value == 'True': value = True - if value == 'False': value = False - if value == 'None': value = None - - if key == 'force_linenos': - warnings.warn('The "force_linenos" config setting' - ' to the CodeHilite extension is deprecrecated.' - ' Use "linenums" instead.', DeprecationWarning) - if value: - # Carry 'force_linenos' over to new 'linenos'. - self.setConfig('linenums', True) - - self.setConfig(key, value) + super(CodeHiliteExtension, self).__init__(*args, **kwargs) def extendMarkdown(self, md, md_globals): """ Add HilitePostprocessor to Markdown instance. """ @@ -263,6 +246,5 @@ class CodeHiliteExtension(Extension): md.registerExtension(self) -def makeExtension(configs={}): - return CodeHiliteExtension(configs=configs) - +def makeExtension(*args, **kwargs): + return CodeHiliteExtension(*args, **kwargs) diff --git a/awx/lib/site-packages/markdown/extensions/def_list.py b/awx/lib/site-packages/markdown/extensions/def_list.py index df639df931..22e2491a66 100644 --- a/awx/lib/site-packages/markdown/extensions/def_list.py +++ b/awx/lib/site-packages/markdown/extensions/def_list.py @@ -2,19 +2,16 @@ Definition List Extension for Python-Markdown ============================================= -Added parsing of Definition Lists to Python-Markdown. +Adds parsing of Definition Lists to Python-Markdown. -A simple example: +See <https://pythonhosted.org/Markdown/extensions/definition_lists.html> +for documentation. - Apple - : Pomaceous fruit of plants of the genus Malus in - the family Rosaceae. - : An american computer company. 
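# Aside on the codehilite hunk above: the change swaps imported pygments
# classes for name-based lookups. A minimal sketch of the two calls (assumes
# pygments is installed; unknown names raise a ValueError subclass, which is
# what the surrounding code catches):
from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.formatters import get_formatter_by_name

lexer = get_lexer_by_name('python')
formatter = get_formatter_by_name('html', linenos=True, cssclass='codehilite')
print(highlight('print("hi")', lexer, formatter))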
+Original code Copyright 2008 [Waylan Limberg](http://achinghead.com) - Orange - : The fruit of an evergreen tree of the genus Citrus. +All changes Copyright 2008-2014 The Python Markdown Project -Copyright 2008 - [Waylan Limberg](http://achinghead.com) +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) """ @@ -113,6 +110,6 @@ class DefListExtension(Extension): '>ulist') -def makeExtension(configs={}): - return DefListExtension(configs=configs) +def makeExtension(*args, **kwargs): + return DefListExtension(*args, **kwargs) diff --git a/awx/lib/site-packages/markdown/extensions/extra.py b/awx/lib/site-packages/markdown/extensions/extra.py index 8986ba683f..4044a874e4 100644 --- a/awx/lib/site-packages/markdown/extensions/extra.py +++ b/awx/lib/site-packages/markdown/extensions/extra.py @@ -11,10 +11,6 @@ convenience so that only one extension needs to be listed when initiating Markdown. See the documentation for each individual extension for specifics about that extension. -In the event that one or more of the supported extensions are not -available for import, Markdown will issue a warning and simply continue -without that extension. - There may be additional extensions that are distributed with Python-Markdown that are not included here in Extra. Those extensions are not part of PHP Markdown Extra, and therefore, not part of @@ -24,6 +20,13 @@ under a differant name. You could also edit the `extensions` global variable defined below, but be aware that such changes may be lost when you upgrade to any future version of Python-Markdown. +See <https://pythonhosted.org/Markdown/extensions/extra.html> +for documentation. + +Copyright The Python Markdown Project + +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + """ from __future__ import absolute_import @@ -33,19 +36,25 @@ from ..blockprocessors import BlockProcessor from .. import util import re -extensions = ['smart_strong', - 'fenced_code', - 'footnotes', - 'attr_list', - 'def_list', - 'tables', - 'abbr', - ] +extensions = [ + 'markdown.extensions.smart_strong', + 'markdown.extensions.fenced_code', + 'markdown.extensions.footnotes', + 'markdown.extensions.attr_list', + 'markdown.extensions.def_list', + 'markdown.extensions.tables', + 'markdown.extensions.abbr' +] class ExtraExtension(Extension): """ Add various extensions to Markdown class.""" + def __init__(self, *args, **kwargs): + """ config is just a dumb holder which gets passed to actual ext later. """ + self.config = kwargs.pop('configs', {}) + self.config.update(kwargs) + def extendMarkdown(self, md, md_globals): """ Register extension instances. """ md.registerExtensions(extensions, self.config) @@ -60,8 +69,8 @@ class ExtraExtension(Extension): r'^(p|h[1-6]|li|dd|dt|td|th|legend|address)$', re.IGNORECASE) -def makeExtension(configs={}): - return ExtraExtension(configs=dict(configs)) +def makeExtension(*args, **kwargs): + return ExtraExtension(*args, **kwargs) class MarkdownInHtmlProcessor(BlockProcessor): diff --git a/awx/lib/site-packages/markdown/extensions/fenced_code.py b/awx/lib/site-packages/markdown/extensions/fenced_code.py index d6e043c99b..2aacca639b 100644 --- a/awx/lib/site-packages/markdown/extensions/fenced_code.py +++ b/awx/lib/site-packages/markdown/extensions/fenced_code.py @@ -4,87 +4,15 @@ Fenced Code Extension for Python Markdown This extension adds Fenced Code Blocks to Python-Markdown. - >>> import markdown - >>> text = ''' - ... A paragraph before a fenced code block: - ... - ... ~~~ - ... Fenced code block - ... 
~~~ - ... ''' - >>> html = markdown.markdown(text, extensions=['fenced_code']) - >>> print html - <p>A paragraph before a fenced code block:</p> - <pre><code>Fenced code block - </code></pre> +See <https://pythonhosted.org/Markdown/extensions/fenced_code_blocks.html> +for documentation. -Works with safe_mode also (we check this because we are using the HtmlStash): +Original code Copyright 2007-2008 [Waylan Limberg](http://achinghead.com/). - >>> print markdown.markdown(text, extensions=['fenced_code'], safe_mode='replace') - <p>A paragraph before a fenced code block:</p> - <pre><code>Fenced code block - </code></pre> -Include tilde's in a code block and wrap with blank lines: - - >>> text = ''' - ... ~~~~~~~~ - ... - ... ~~~~ - ... ~~~~~~~~''' - >>> print markdown.markdown(text, extensions=['fenced_code']) - <pre><code> - ~~~~ - </code></pre> - -Language tags: - - >>> text = ''' - ... ~~~~{.python} - ... # Some python code - ... ~~~~''' - >>> print markdown.markdown(text, extensions=['fenced_code']) - <pre><code class="python"># Some python code - </code></pre> - -Optionally backticks instead of tildes as per how github's code block markdown is identified: - - >>> text = ''' - ... ````` - ... # Arbitrary code - ... ~~~~~ # these tildes will not close the block - ... `````''' - >>> print markdown.markdown(text, extensions=['fenced_code']) - <pre><code># Arbitrary code - ~~~~~ # these tildes will not close the block - </code></pre> - -If the codehighlite extension and Pygments are installed, lines can be highlighted: - - >>> text = ''' - ... ```hl_lines="1 3" - ... line 1 - ... line 2 - ... line 3 - ... ```''' - >>> print markdown.markdown(text, extensions=['codehilite', 'fenced_code']) - <pre><code><span class="hilight">line 1</span> - line 2 - <span class="hilight">line 3</span> - </code></pre> - -Copyright 2007-2008 [Waylan Limberg](http://achinghead.com/). - -Project website: <http://packages.python.org/Markdown/extensions/fenced_code_blocks.html> -Contact: markdown@freewisdom.org - -License: BSD (see ../docs/LICENSE for details) - -Dependencies: -* [Python 2.4+](http://python.org) -* [Markdown 2.0+](http://packages.python.org/Markdown/) -* [Pygments (optional)](http://pygments.org) +All changes Copyright 2008-2014 The Python Markdown Project +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) """ from __future__ import absolute_import @@ -175,5 +103,6 @@ class FencedBlockPreprocessor(Preprocessor): return txt -def makeExtension(configs=None): - return FencedCodeExtension(configs=configs) +def makeExtension(*args, **kwargs): + return FencedCodeExtension(*args, **kwargs) + diff --git a/awx/lib/site-packages/markdown/extensions/footnotes.py b/awx/lib/site-packages/markdown/extensions/footnotes.py index 9f93ad1b5d..a59de970b4 100644 --- a/awx/lib/site-packages/markdown/extensions/footnotes.py +++ b/awx/lib/site-packages/markdown/extensions/footnotes.py @@ -1,25 +1,15 @@ """ -========================= FOOTNOTES ================================= +Footnotes Extension for Python-Markdown +======================================= -This section adds footnote handling to markdown. It can be used as -an example for extending python-markdown with relatively complex -functionality. While in this case the extension is included inside -the module itself, it could just as easily be added from outside the -module. Not that all markdown classes above are ignorant about -footnotes. All footnote functionality is provided separately and -then added to the markdown instance at the run time. 
+Adds footnote handling to Python-Markdown. -Footnote functionality is attached by calling extendMarkdown() -method of FootnoteExtension. The method also registers the -extension to allow it's state to be reset by a call to reset() -method. +See <https://pythonhosted.org/Markdown/extensions/footnotes.html> +for documentation. -Example: - Footnotes[^1] have a label[^label] and a definition[^!DEF]. +Copyright The Python Markdown Project - [^1]: This is a footnote - [^label]: A footnote on "label" - [^!DEF]: The footnote for definition +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) """ @@ -42,23 +32,23 @@ TABBED_RE = re.compile(r'((\t)|( ))(.*)') class FootnoteExtension(Extension): """ Footnote Extension. """ - def __init__ (self, configs): + def __init__ (self, *args, **kwargs): """ Setup configs. """ - self.config = {'PLACE_MARKER': - ["///Footnotes Go Here///", - "The text string that marks where the footnotes go"], - 'UNIQUE_IDS': - [False, - "Avoid name collisions across " - "multiple calls to reset()."], - "BACKLINK_TEXT": - ["↩", - "The text string that links from the footnote to the reader's place."] - } - - for key, value in configs: - self.config[key][0] = value + self.config = { + 'PLACE_MARKER': + ["///Footnotes Go Here///", + "The text string that marks where the footnotes go"], + 'UNIQUE_IDS': + [False, + "Avoid name collisions across " + "multiple calls to reset()."], + "BACKLINK_TEXT": + ["↩", + "The text string that links from the footnote to the reader's place."] + } + super(FootnoteExtension, self).__init__(*args, **kwargs) + # In multiple invocations, emit links that don't get tangled. self.unique_prefix = 0 @@ -309,7 +299,7 @@ class FootnotePostprocessor(Postprocessor): text = text.replace(FN_BACKLINK_TEXT, self.footnotes.getConfig("BACKLINK_TEXT")) return text.replace(NBSP_PLACEHOLDER, " ") -def makeExtension(configs=[]): +def makeExtension(*args, **kwargs): """ Return an instance of the FootnoteExtension """ - return FootnoteExtension(configs=configs) + return FootnoteExtension(*args, **kwargs) diff --git a/awx/lib/site-packages/markdown/extensions/headerid.py b/awx/lib/site-packages/markdown/extensions/headerid.py index 8221fe1d52..f7b7805bf8 100644 --- a/awx/lib/site-packages/markdown/extensions/headerid.py +++ b/awx/lib/site-packages/markdown/extensions/headerid.py @@ -4,73 +4,14 @@ HeaderID Extension for Python-Markdown Auto-generate id attributes for HTML headers. -Basic usage: +See <https://pythonhosted.org/Markdown/extensions/header_id.html> +for documentation. - >>> import markdown - >>> text = "# Some Header #" - >>> md = markdown.markdown(text, ['headerid']) - >>> print md - <h1 id="some-header">Some Header</h1> +Original code Copyright 2007-2011 [Waylan Limberg](http://achinghead.com/). -All header IDs are unique: +All changes Copyright 2011-2014 The Python Markdown Project - >>> text = ''' - ... #Header - ... #Header - ... #Header''' - >>> md = markdown.markdown(text, ['headerid']) - >>> print md - <h1 id="header">Header</h1> - <h1 id="header_1">Header</h1> - <h1 id="header_2">Header</h1> - -To fit within a html template's hierarchy, set the header base level: - - >>> text = ''' - ... #Some Header - ... ## Next Level''' - >>> md = markdown.markdown(text, ['headerid(level=3)']) - >>> print md - <h3 id="some-header">Some Header</h3> - <h4 id="next-level">Next Level</h4> - -Works with inline markup. - - >>> text = '#Some *Header* with [markup](http://example.com).' 
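# Aside: a hedged usage sketch reconstructing the example the footnotes hunk
# above removes from its docstring (the syntax itself is unchanged by this
# commit):
import markdown

text = (
    "Footnotes[^1] have a label and a definition.\n"
    "\n"
    "[^1]: This is a footnote\n"
)
print(markdown.markdown(text, extensions=['markdown.extensions.footnotes']))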
- >>> md = markdown.markdown(text, ['headerid']) - >>> print md - <h1 id="some-header-with-markup">Some <em>Header</em> with <a href="http://example.com">markup</a>.</h1> - -Turn off auto generated IDs: - - >>> text = ''' - ... # Some Header - ... # Another Header''' - >>> md = markdown.markdown(text, ['headerid(forceid=False)']) - >>> print md - <h1>Some Header</h1> - <h1>Another Header</h1> - -Use with MetaData extension: - - >>> text = '''header_level: 2 - ... header_forceid: Off - ... - ... # A Header''' - >>> md = markdown.markdown(text, ['headerid', 'meta']) - >>> print md - <h2>A Header</h2> - -Copyright 2007-2011 [Waylan Limberg](http://achinghead.com/). - -Project website: <http://packages.python.org/Markdown/extensions/header_id.html> -Contact: markdown@freewisdom.org - -License: BSD (see ../docs/LICENSE for details) - -Dependencies: -* [Python 2.3+](http://python.org) -* [Markdown 2.0+](http://packages.python.org/Markdown/) +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) """ @@ -127,7 +68,7 @@ def stashedHTML2text(text, md): def _html_sub(m): """ Substitute raw html with plain text. """ try: - raw, safe = md.htmlStash.rawHtmlBlocks[int(m.group(1))] + raw, safe = md.htmlStash.rawHtmlBlocks[int(m.group(1))] except (IndexError, TypeError): return m.group(0) if md.safeMode and not safe: @@ -176,7 +117,7 @@ class HeaderIdTreeprocessor(Treeprocessor): class HeaderIdExtension(Extension): - def __init__(self, configs): + def __init__(self, *args, **kwargs): # set defaults self.config = { 'level' : ['1', 'Base level for headers.'], @@ -185,8 +126,7 @@ class HeaderIdExtension(Extension): 'slugify' : [slugify, 'Callable to generate anchors'], } - for key, value in configs: - self.setConfig(key, value) + super(HeaderIdExtension, self).__init__(*args, **kwargs) def extendMarkdown(self, md, md_globals): md.registerExtension(self) @@ -204,5 +144,6 @@ class HeaderIdExtension(Extension): self.processor.IDs = set() -def makeExtension(configs=None): - return HeaderIdExtension(configs=configs) +def makeExtension(*args, **kwargs): + return HeaderIdExtension(*args, **kwargs) + diff --git a/awx/lib/site-packages/markdown/extensions/meta.py b/awx/lib/site-packages/markdown/extensions/meta.py index c4a4b210f9..bcc25a09b5 100644 --- a/awx/lib/site-packages/markdown/extensions/meta.py +++ b/awx/lib/site-packages/markdown/extensions/meta.py @@ -4,38 +4,14 @@ Meta Data Extension for Python-Markdown This extension adds Meta Data handling to markdown. -Basic Usage: +See <https://pythonhosted.org/Markdown/extensions/meta_data.html> +for documentation. - >>> import markdown - >>> text = '''Title: A Test Doc. - ... Author: Waylan Limberg - ... John Doe - ... Blank_Data: - ... - ... The body. This is paragraph one. - ... ''' - >>> md = markdown.Markdown(['meta']) - >>> print md.convert(text) - <p>The body. This is paragraph one.</p> - >>> print md.Meta - {u'blank_data': [u''], u'author': [u'Waylan Limberg', u'John Doe'], u'title': [u'A Test Doc.']} +Original code Copyright 2007-2008 [Waylan Limberg](http://achinghead.com). -Make sure text without Meta Data still works (markdown < 1.6b returns a <p>). +All changes Copyright 2008-2014 The Python Markdown Project - >>> text = ' Some Code - not extra lines of meta data.' - >>> md = markdown.Markdown(['meta']) - >>> print md.convert(text) - <pre><code>Some Code - not extra lines of meta data. - </code></pre> - >>> md.Meta - {} - -Copyright 2007-2008 [Waylan Limberg](http://achinghead.com). 
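# Aside: a hedged usage sketch mirroring the doctest the meta hunk above
# removes -- Meta keys are lower-cased and values are lists of lines:
import markdown

md = markdown.Markdown(extensions=['markdown.extensions.meta'])
print(md.convert('Title: A Test Doc.\n\nThe body.'))   # <p>The body.</p>
print(md.Meta)                                         # {'title': ['A Test Doc.']}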
-
-Project website: <http://packages.python.org/Markdown/meta_data.html>
-Contact: markdown@freewisdom.org
-
-License: BSD (see ../LICENSE.md for details)
+License: [BSD](http://www.opensource.org/licenses/bsd-license.php)

 """

@@ -55,7 +31,7 @@ class MetaExtension (Extension):
     def extendMarkdown(self, md, md_globals):
         """ Add MetaPreprocessor to Markdown instance. """

-        md.preprocessors.add("meta", MetaPreprocessor(md), "_begin")
+        md.preprocessors.add("meta", MetaPreprocessor(md), ">normalize_whitespace")


 class MetaPreprocessor(Preprocessor):

@@ -89,5 +65,6 @@ class MetaPreprocessor(Preprocessor):
         return lines


-def makeExtension(configs={}):
-    return MetaExtension(configs=configs)
+def makeExtension(*args, **kwargs):
+    return MetaExtension(*args, **kwargs)
+
diff --git a/awx/lib/site-packages/markdown/extensions/nl2br.py b/awx/lib/site-packages/markdown/extensions/nl2br.py
index da4b339958..062a7e6e2e 100644
--- a/awx/lib/site-packages/markdown/extensions/nl2br.py
+++ b/awx/lib/site-packages/markdown/extensions/nl2br.py
@@ -5,18 +5,14 @@ NL2BR Extension

 A Python-Markdown extension to treat newlines as hard breaks; like
 GitHub-flavored Markdown does.

-Usage:
+See <https://pythonhosted.org/Markdown/extensions/nl2br.html>
+for documentation.

-    >>> import markdown
-    >>> print markdown.markdown('line 1\\nline 2', extensions=['nl2br'])
-    <p>line 1<br />
-    line 2</p>
+Original code Copyright 2011 [Brian Neal](http://deathofagremmie.com/)

-Copyright 2011 [Brian Neal](http://deathofagremmie.com/)
+All changes Copyright 2011-2014 The Python Markdown Project

-Dependencies:
-* [Python 2.4+](http://python.org)
-* [Markdown 2.1+](http://packages.python.org/Markdown/)
+License: [BSD](http://www.opensource.org/licenses/bsd-license.php)

 """

@@ -34,5 +30,6 @@ class Nl2BrExtension(Extension):
         md.inlinePatterns.add('nl', br_tag, '_end')


-def makeExtension(configs=None):
-    return Nl2BrExtension(configs)
+def makeExtension(*args, **kwargs):
+    return Nl2BrExtension(*args, **kwargs)
+
diff --git a/awx/lib/site-packages/markdown/extensions/sane_lists.py b/awx/lib/site-packages/markdown/extensions/sane_lists.py
index fda663828e..9eb3a11f88 100644
--- a/awx/lib/site-packages/markdown/extensions/sane_lists.py
+++ b/awx/lib/site-packages/markdown/extensions/sane_lists.py
@@ -2,19 +2,16 @@ Sane List Extension for Python-Markdown
 =======================================

-Modify the behavior of Lists in Python-Markdown t act in a sane manor.
+Modify the behavior of Lists in Python-Markdown to act in a sane manner.

-In standard Markdown syntax, the following would constitute a single
-ordered list. However, with this extension, the output would include
-two lists, the first an ordered list and the second and unordered list.
+See <https://pythonhosted.org/Markdown/extensions/sane_lists.html>
+for documentation.

-    1. ordered
-    2.
list +Original code Copyright 2011 [Waylan Limberg](http://achinghead.com) - * unordered - * list +All changes Copyright 2011-2014 The Python Markdown Project -Copyright 2011 - [Waylan Limberg](http://achinghead.com) +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) """ @@ -46,6 +43,6 @@ class SaneListExtension(Extension): md.parser.blockprocessors['ulist'] = SaneUListProcessor(md.parser) -def makeExtension(configs={}): - return SaneListExtension(configs=configs) +def makeExtension(*args, **kwargs): + return SaneListExtension(*args, **kwargs) diff --git a/awx/lib/site-packages/markdown/extensions/smart_strong.py b/awx/lib/site-packages/markdown/extensions/smart_strong.py index 4818cf9ea8..331dae8aeb 100644 --- a/awx/lib/site-packages/markdown/extensions/smart_strong.py +++ b/awx/lib/site-packages/markdown/extensions/smart_strong.py @@ -4,21 +4,14 @@ Smart_Strong Extension for Python-Markdown This extention adds smarter handling of double underscores within words. -Simple Usage: +See <https://pythonhosted.org/Markdown/extensions/smart_strong.html> +for documentation. - >>> import markdown - >>> print markdown.markdown('Text with double__underscore__words.', - ... extensions=['smart_strong']) - <p>Text with double__underscore__words.</p> - >>> print markdown.markdown('__Strong__ still works.', - ... extensions=['smart_strong']) - <p><strong>Strong</strong> still works.</p> - >>> print markdown.markdown('__this__works__too__.', - ... extensions=['smart_strong']) - <p><strong>this__works__too</strong>.</p> +Original code Copyright 2011 [Waylan Limberg](http://achinghead.com) -Copyright 2011 -[Waylan Limberg](http://achinghead.com) +All changes Copyright 2011-2014 The Python Markdown Project + +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) ''' @@ -38,5 +31,5 @@ class SmartEmphasisExtension(Extension): md.inlinePatterns['strong'] = SimpleTagPattern(STRONG_RE, 'strong') md.inlinePatterns.add('strong2', SimpleTagPattern(SMART_STRONG_RE, 'strong'), '>emphasis2') -def makeExtension(configs={}): - return SmartEmphasisExtension(configs=dict(configs)) +def makeExtension(*args, **kwargs): + return SmartEmphasisExtension(*args, **kwargs) diff --git a/awx/lib/site-packages/markdown/extensions/smarty.py b/awx/lib/site-packages/markdown/extensions/smarty.py index 2f946f8294..00c330f1f0 100644 --- a/awx/lib/site-packages/markdown/extensions/smarty.py +++ b/awx/lib/site-packages/markdown/extensions/smarty.py @@ -1,73 +1,91 @@ # -*- coding: utf-8 -*- -# Smarty extension for Python-Markdown -# Author: 2013, Dmitry Shachnev <mitya57@gmail.com> +''' +Smarty extension for Python-Markdown +==================================== + +Adds conversion of ASCII dashes, quotes and ellipses to their HTML +entity equivalents. + +See <https://pythonhosted.org/Markdown/extensions/smarty.html> +for documentation. + +Author: 2013, Dmitry Shachnev <mitya57@gmail.com> + +All changes Copyright 2013-2014 The Python Markdown Project + +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + +SmartyPants license: + + Copyright (c) 2003 John Gruber <http://daringfireball.net/> + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. 
+ + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + * Neither the name "SmartyPants" nor the names of its contributors + may be used to endorse or promote products derived from this + software without specific prior written permission. + + This software is provided by the copyright holders and contributors "as + is" and any express or implied warranties, including, but not limited + to, the implied warranties of merchantability and fitness for a + particular purpose are disclaimed. In no event shall the copyright + owner or contributors be liable for any direct, indirect, incidental, + special, exemplary, or consequential damages (including, but not + limited to, procurement of substitute goods or services; loss of use, + data, or profits; or business interruption) however caused and on any + theory of liability, whether in contract, strict liability, or tort + (including negligence or otherwise) arising in any way out of the use + of this software, even if advised of the possibility of such damage. + + +smartypants.py license: + + smartypants.py is a derivative work of SmartyPants. + Copyright (c) 2004, 2007 Chad Miller <http://web.chad.org/> + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + This software is provided by the copyright holders and contributors "as + is" and any express or implied warranties, including, but not limited + to, the implied warranties of merchantability and fitness for a + particular purpose are disclaimed. In no event shall the copyright + owner or contributors be liable for any direct, indirect, incidental, + special, exemplary, or consequential damages (including, but not + limited to, procurement of substitute goods or services; loss of use, + data, or profits; or business interruption) however caused and on any + theory of liability, whether in contract, strict liability, or tort + (including negligence or otherwise) arising in any way out of the use + of this software, even if advised of the possibility of such damage. + +''' -# SmartyPants license: -# -# Copyright (c) 2003 John Gruber <http://daringfireball.net/> -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# -# * Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# * Neither the name "SmartyPants" nor the names of its contributors -# may be used to endorse or promote products derived from this -# software without specific prior written permission. 
-# -# This software is provided by the copyright holders and contributors "as -# is" and any express or implied warranties, including, but not limited -# to, the implied warranties of merchantability and fitness for a -# particular purpose are disclaimed. In no event shall the copyright -# owner or contributors be liable for any direct, indirect, incidental, -# special, exemplary, or consequential damages (including, but not -# limited to, procurement of substitute goods or services; loss of use, -# data, or profits; or business interruption) however caused and on any -# theory of liability, whether in contract, strict liability, or tort -# (including negligence or otherwise) arising in any way out of the use -# of this software, even if advised of the possibility of such damage. -# -# -# smartypants.py license: -# -# smartypants.py is a derivative work of SmartyPants. -# Copyright (c) 2004, 2007 Chad Miller <http://web.chad.org/> -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# -# * Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# -# This software is provided by the copyright holders and contributors "as -# is" and any express or implied warranties, including, but not limited -# to, the implied warranties of merchantability and fitness for a -# particular purpose are disclaimed. In no event shall the copyright -# owner or contributors be liable for any direct, indirect, incidental, -# special, exemplary, or consequential damages (including, but not -# limited to, procurement of substitute goods or services; loss of use, -# data, or profits; or business interruption) however caused and on any -# theory of liability, whether in contract, strict liability, or tort -# (including negligence or otherwise) arising in any way out of the use -# of this software, even if advised of the possibility of such damage. from __future__ import unicode_literals from . import Extension from ..inlinepatterns import HtmlPattern +from ..odict import OrderedDict +from ..treeprocessors import InlineProcessor from ..util import parseBoolValue # Constants for quote education. @@ -83,12 +101,25 @@ openingQuotesBase = ( '|&[mn]dash;' # or named dash entities '|–|—' # or decimal entities ')' -) +) + +substitutions = { + 'mdash': '—', + 'ndash': '–', + 'ellipsis': '…', + 'left-angle-quote': '«', + 'right-angle-quote': '»', + 'left-single-quote': '‘', + 'right-single-quote': '’', + 'left-double-quote': '“', + 'right-double-quote': '”', +} + # Special case if the very first character is a quote # followed by punctuation at a non-word-break. 
Close the quotes by brute force: -singleQuoteStartRe = r"^'(?=%s\\B)" % punctClass -doubleQuoteStartRe = r'^"(?=%s\\B)' % punctClass +singleQuoteStartRe = r"^'(?=%s\B)" % punctClass +doubleQuoteStartRe = r'^"(?=%s\B)' % punctClass # Special case for double sets of quotes, e.g.: # <p>He said, "'Quoted' words in a larger quote."</p> @@ -113,8 +144,6 @@ closingSingleQuotesRegex2 = r"(?<=%s)'(\s|s\b)" % closeClass remainingSingleQuotesRegex = "'" remainingDoubleQuotesRegex = '"' -lsquo, rsquo, ldquo, rdquo = '‘', '’', '“', '”' - class SubstituteTextPattern(HtmlPattern): def __init__(self, pattern, replace, markdown_instance): """ Replaces matches with some text. """ @@ -132,35 +161,56 @@ class SubstituteTextPattern(HtmlPattern): return result class SmartyExtension(Extension): - def __init__(self, configs): + def __init__(self, *args, **kwargs): self.config = { 'smart_quotes': [True, 'Educate quotes'], + 'smart_angled_quotes': [False, 'Educate angled quotes'], 'smart_dashes': [True, 'Educate dashes'], - 'smart_ellipses': [True, 'Educate ellipses'] + 'smart_ellipses': [True, 'Educate ellipses'], + 'substitutions' : [{}, 'Overwrite default substitutions'], } - for key, value in configs: - self.setConfig(key, parseBoolValue(value)) + super(SmartyExtension, self).__init__(*args, **kwargs) + self.substitutions = dict(substitutions) + self.substitutions.update(self.getConfig('substitutions', default={})) def _addPatterns(self, md, patterns, serie): for ind, pattern in enumerate(patterns): pattern += (md,) pattern = SubstituteTextPattern(*pattern) - after = ('>smarty-%s-%d' % (serie, ind - 1) if ind else '>entity') + after = ('>smarty-%s-%d' % (serie, ind - 1) if ind else '_begin') name = 'smarty-%s-%d' % (serie, ind) - md.inlinePatterns.add(name, pattern, after) + self.inlinePatterns.add(name, pattern, after) def educateDashes(self, md): - emDashesPattern = SubstituteTextPattern(r'(?<!-)---(?!-)', ('—',), md) - enDashesPattern = SubstituteTextPattern(r'(?<!-)--(?!-)', ('–',), md) - md.inlinePatterns.add('smarty-em-dashes', emDashesPattern, '>entity') - md.inlinePatterns.add('smarty-en-dashes', enDashesPattern, + emDashesPattern = SubstituteTextPattern(r'(?<!-)---(?!-)', + (self.substitutions['mdash'],), md) + enDashesPattern = SubstituteTextPattern(r'(?<!-)--(?!-)', + (self.substitutions['ndash'],), md) + self.inlinePatterns.add('smarty-em-dashes', emDashesPattern, '_begin') + self.inlinePatterns.add('smarty-en-dashes', enDashesPattern, '>smarty-em-dashes') def educateEllipses(self, md): - ellipsesPattern = SubstituteTextPattern(r'(?<!\.)\.{3}(?!\.)', ('…',), md) - md.inlinePatterns.add('smarty-ellipses', ellipsesPattern, '>entity') + ellipsesPattern = SubstituteTextPattern(r'(?<!\.)\.{3}(?!\.)', + (self.substitutions['ellipsis'],), md) + self.inlinePatterns.add('smarty-ellipses', ellipsesPattern, '_begin') + + def educateAngledQuotes(self, md): + leftAngledQuotePattern = SubstituteTextPattern(r'\<\<', + (self.substitutions['left-angle-quote'],), md) + rightAngledQuotePattern = SubstituteTextPattern(r'\>\>', + (self.substitutions['right-angle-quote'],), md) + self.inlinePatterns.add('smarty-left-angle-quotes', + leftAngledQuotePattern, '_begin') + self.inlinePatterns.add('smarty-right-angle-quotes', + rightAngledQuotePattern, '>smarty-left-angle-quotes') def educateQuotes(self, md): + configs = self.getConfigs() + lsquo = self.substitutions['left-single-quote'] + rsquo = self.substitutions['right-single-quote'] + ldquo = self.substitutions['left-double-quote'] + rdquo = 
self.substitutions['right-double-quote'] patterns = ( (singleQuoteStartRe, (rsquo,)), (doubleQuoteStartRe, (rdquo,)), @@ -179,13 +229,19 @@ class SmartyExtension(Extension): def extendMarkdown(self, md, md_globals): configs = self.getConfigs() - if configs['smart_quotes']: - self.educateQuotes(md) - if configs['smart_dashes']: - self.educateDashes(md) + self.inlinePatterns = OrderedDict() if configs['smart_ellipses']: self.educateEllipses(md) + if configs['smart_quotes']: + self.educateQuotes(md) + if configs['smart_angled_quotes']: + self.educateAngledQuotes(md) + if configs['smart_dashes']: + self.educateDashes(md) + inlineProcessor = InlineProcessor(md) + inlineProcessor.inlinePatterns = self.inlinePatterns + md.treeprocessors.add('smarty', inlineProcessor, '_end') md.ESCAPED_CHARS.extend(['"', "'"]) -def makeExtension(configs=None): - return SmartyExtension(configs) +def makeExtension(*args, **kwargs): + return SmartyExtension(*args, **kwargs) diff --git a/awx/lib/site-packages/markdown/extensions/tables.py b/awx/lib/site-packages/markdown/extensions/tables.py index ad52ec11c7..57507e96d8 100644 --- a/awx/lib/site-packages/markdown/extensions/tables.py +++ b/awx/lib/site-packages/markdown/extensions/tables.py @@ -4,14 +4,15 @@ Tables Extension for Python-Markdown Added parsing of tables to Python-Markdown. -A simple example: +See <https://pythonhosted.org/Markdown/extensions/tables.html> +for documentation. - First Header | Second Header - ------------- | ------------- - Content Cell | Content Cell - Content Cell | Content Cell +Original code Copyright 2009 [Waylan Limberg](http://achinghead.com) + +All changes Copyright 2008-2014 The Python Markdown Project + +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) -Copyright 2009 - [Waylan Limberg](http://achinghead.com) """ from __future__ import absolute_import @@ -71,7 +72,7 @@ class TableProcessor(BlockProcessor): c = etree.SubElement(tr, tag) try: c.text = cells[i].strip() - except IndexError: + except IndexError: #pragma: no cover c.text = "" if a: c.set('align', a) @@ -96,5 +97,6 @@ class TableExtension(Extension): '<hashheader') -def makeExtension(configs={}): - return TableExtension(configs=configs) +def makeExtension(*args, **kwargs): + return TableExtension(*args, **kwargs) + diff --git a/awx/lib/site-packages/markdown/extensions/toc.py b/awx/lib/site-packages/markdown/extensions/toc.py index 89468d60c7..f7fb675b84 100644 --- a/awx/lib/site-packages/markdown/extensions/toc.py +++ b/awx/lib/site-packages/markdown/extensions/toc.py @@ -1,11 +1,15 @@ """ Table of Contents Extension for Python-Markdown -* * * +=============================================== -(c) 2008 [Jack Miller](http://codezen.org) +See <https://pythonhosted.org/Markdown/extensions/toc.html> +for documentation. 
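The toc extension replaces a `[TOC]` marker with a generated table of contents. A small sketch, assuming Markdown 2.5.x:

```python
import markdown

src = "[TOC]\n\n# Install\n\n## From source\n\n# Usage"
html = markdown.markdown(src, extensions=['markdown.extensions.toc'])
# The marker is replaced by a nested <div class="toc"> list built
# from the document's headers.
print(html)
```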
-Dependencies: -* [Markdown 2.1+](http://packages.python.org/Markdown/) +Oringinal code Copyright 2008 [Jack Miller](http://codezen.org) + +All changes Copyright 2008-2014 The Python Markdown Project + +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) """ @@ -23,60 +27,59 @@ def order_toc_list(toc_list): [{'level': 1}, {'level': 2}] => [{'level': 1, 'children': [{'level': 2, 'children': []}]}] - + A wrong list is also converted: [{'level': 2}, {'level': 1}] => [{'level': 2, 'children': []}, {'level': 1, 'children': []}] """ - - def build_correct(remaining_list, prev_elements=[{'level': 1000}]): - - if not remaining_list: - return [], [] - - current = remaining_list.pop(0) - if not 'children' in current.keys(): - current['children'] = [] - - if not prev_elements: - # This happens for instance with [8, 1, 1], ie. when some - # header level is outside a scope. We treat it as a - # top-level - next_elements, children = build_correct(remaining_list, [current]) - current['children'].append(children) - return [current] + next_elements, [] - - prev_element = prev_elements.pop() - children = [] - next_elements = [] - # Is current part of the child list or next list? - if current['level'] > prev_element['level']: - #print "%d is a child of %d" % (current['level'], prev_element['level']) - prev_elements.append(prev_element) - prev_elements.append(current) - prev_element['children'].append(current) - next_elements2, children2 = build_correct(remaining_list, prev_elements) - children += children2 - next_elements += next_elements2 - else: - #print "%d is ancestor of %d" % (current['level'], prev_element['level']) - if not prev_elements: - #print "No previous elements, so appending to the next set" - next_elements.append(current) - prev_elements = [current] - next_elements2, children2 = build_correct(remaining_list, prev_elements) - current['children'].extend(children2) + + ordered_list = [] + if len(toc_list): + # Initialize everything by processing the first entry + last = toc_list.pop(0) + last['children'] = [] + levels = [last['level']] + ordered_list.append(last) + parents = [] + + # Walk the rest nesting the entries properly + while toc_list: + t = toc_list.pop(0) + current_level = t['level'] + t['children'] = [] + + # Reduce depth if current level < last item's level + if current_level < levels[-1]: + # Pop last level since we know we are less than it + levels.pop() + + # Pop parents and levels we are less than or equal to + to_pop = 0 + for p in reversed(parents): + if current_level <= p['level']: + to_pop += 1 + else: + break + if to_pop: + levels = levels[:-to_pop] + parents = parents[:-to_pop] + + # Note current level as last + levels.append(current_level) + + # Level is the same, so append to the current parent (if available) + if current_level == levels[-1]: + (parents[-1]['children'] if parents else ordered_list).append(t) + + # Current level is > last item's level, + # So make last item a parent and append current as child else: - #print "Previous elements, comparing to those first" - remaining_list.insert(0, current) - next_elements2, children2 = build_correct(remaining_list, prev_elements) - children.extend(children2) - next_elements += next_elements2 - - return next_elements, children - - ordered_list, __ = build_correct(toc_list) + last['children'].append(t) + parents.append(last) + levels.append(current_level) + last = t + return ordered_list @@ -204,26 +207,26 @@ class TocExtension(Extension): TreeProcessorClass = TocTreeprocessor - def __init__(self, configs=[]): - 
self.config = { "marker" : ["[TOC]", - "Text to find and replace with Table of Contents -" - "Defaults to \"[TOC]\""], - "slugify" : [slugify, - "Function to generate anchors based on header text-" - "Defaults to the headerid ext's slugify function."], - "title" : [None, - "Title to insert into TOC <div> - " - "Defaults to None"], - "anchorlink" : [0, - "1 if header should be a self link" - "Defaults to 0"], - "permalink" : [0, - "1 or link text if a Sphinx-style permalink should be added", - "Defaults to 0"] - } + def __init__(self, *args, **kwargs): + self.config = { + "marker" : ["[TOC]", + "Text to find and replace with Table of Contents - " + "Defaults to \"[TOC]\""], + "slugify" : [slugify, + "Function to generate anchors based on header text - " + "Defaults to the headerid ext's slugify function."], + "title" : ["", + "Title to insert into TOC <div> - " + "Defaults to an empty string"], + "anchorlink" : [0, + "1 if header should be a self link - " + "Defaults to 0"], + "permalink" : [0, + "1 or link text if a Sphinx-style permalink should be added - " + "Defaults to 0"] + } - for key, value in configs: - self.setConfig(key, value) + super(TocExtension, self).__init__(*args, **kwargs) def extendMarkdown(self, md, md_globals): tocext = self.TreeProcessorClass(md) @@ -236,5 +239,5 @@ class TocExtension(Extension): md.treeprocessors.add("toc", tocext, "_end") -def makeExtension(configs={}): - return TocExtension(configs=configs) +def makeExtension(*args, **kwargs): + return TocExtension(*args, **kwargs) diff --git a/awx/lib/site-packages/markdown/extensions/wikilinks.py b/awx/lib/site-packages/markdown/extensions/wikilinks.py index ba1947c0b1..64377cf3e2 100644 --- a/awx/lib/site-packages/markdown/extensions/wikilinks.py +++ b/awx/lib/site-packages/markdown/extensions/wikilinks.py @@ -2,78 +2,17 @@ WikiLinks Extension for Python-Markdown ====================================== -Converts [[WikiLinks]] to relative links. Requires Python-Markdown 2.0+ +Converts [[WikiLinks]] to relative links. -Basic usage: +See <https://pythonhosted.org/Markdown/extensions/wikilinks.html> +for documentation. - >>> import markdown - >>> text = "Some text with a [[WikiLink]]." - >>> html = markdown.markdown(text, ['wikilinks']) - >>> print html - <p>Some text with a <a class="wikilink" href="/WikiLink/">WikiLink</a>.</p> +Original code Copyright [Waylan Limberg](http://achinghead.com/). -Whitespace behavior: - - >>> print markdown.markdown('[[ foo bar_baz ]]', ['wikilinks']) - <p><a class="wikilink" href="/foo_bar_baz/">foo bar_baz</a></p> - >>> print markdown.markdown('foo [[ ]] bar', ['wikilinks']) - <p>foo bar</p> - -To define custom settings the simple way: - - >>> print markdown.markdown(text, - ... ['wikilinks(base_url=/wiki/,end_url=.html,html_class=foo)'] - ... ) - <p>Some text with a <a class="foo" href="/wiki/WikiLink.html">WikiLink</a>.</p> - -Custom settings the complex way: - - >>> md = markdown.Markdown( - ... extensions = ['wikilinks'], - ... extension_configs = {'wikilinks': [ - ... ('base_url', 'http://example.com/'), - ... ('end_url', '.html'), - ... ('html_class', '') ]}, - ... safe_mode = True) - >>> print md.convert(text) - <p>Some text with a <a href="http://example.com/WikiLink.html">WikiLink</a>.</p> - -Use MetaData with mdx_meta.py (Note the blank html_class in MetaData): - - >>> text = """wiki_base_url: http://example.com/ - ... wiki_end_url: .html - ... wiki_html_class: - ... - ... 
Some text with a [[WikiLink]].""" - >>> md = markdown.Markdown(extensions=['meta', 'wikilinks']) - >>> print md.convert(text) - <p>Some text with a <a href="http://example.com/WikiLink.html">WikiLink</a>.</p> - -MetaData should not carry over to next document: - - >>> print md.convert("No [[MetaData]] here.") - <p>No <a class="wikilink" href="/MetaData/">MetaData</a> here.</p> - -Define a custom URL builder: - - >>> def my_url_builder(label, base, end): - ... return '/bar/' - >>> md = markdown.Markdown(extensions=['wikilinks'], - ... extension_configs={'wikilinks' : [('build_url', my_url_builder)]}) - >>> print md.convert('[[foo]]') - <p><a class="wikilink" href="/bar/">foo</a></p> - -From the command line: - - python markdown.py -x wikilinks(base_url=http://example.com/,end_url=.html,html_class=foo) src.txt - -By [Waylan Limberg](http://achinghead.com/). +All changes Copyright The Python Markdown Project License: [BSD](http://www.opensource.org/licenses/bsd-license.php) -Dependencies: -* [Python 2.3+](http://python.org) -* [Markdown 2.0+](http://packages.python.org/Markdown/) ''' from __future__ import absolute_import @@ -90,19 +29,17 @@ def build_url(label, base, end): class WikiLinkExtension(Extension): - def __init__(self, configs): - # set extension defaults + + def __init__ (self, *args, **kwargs): self.config = { - 'base_url' : ['/', 'String to append to beginning or URL.'], - 'end_url' : ['/', 'String to append to end of URL.'], - 'html_class' : ['wikilink', 'CSS hook. Leave blank for none.'], - 'build_url' : [build_url, 'Callable formats URL from label.'], + 'base_url' : ['/', 'String to append to beginning or URL.'], + 'end_url' : ['/', 'String to append to end of URL.'], + 'html_class' : ['wikilink', 'CSS hook. Leave blank for none.'], + 'build_url' : [build_url, 'Callable formats URL from label.'], } - configs = dict(configs) or {} - # Override defaults with user settings - for key, value in configs.items(): - self.setConfig(key, value) + super(WikiLinkExtension, self).__init__(*args, **kwargs) + def extendMarkdown(self, md, md_globals): self.md = md @@ -147,5 +84,5 @@ class WikiLinks(Pattern): return base_url, end_url, html_class -def makeExtension(configs=None) : - return WikiLinkExtension(configs=configs) +def makeExtension(*args, **kwargs) : + return WikiLinkExtension(*args, **kwargs) diff --git a/awx/lib/site-packages/markdown/inlinepatterns.py b/awx/lib/site-packages/markdown/inlinepatterns.py index 9335748730..c9d82fdc75 100644 --- a/awx/lib/site-packages/markdown/inlinepatterns.py +++ b/awx/lib/site-packages/markdown/inlinepatterns.py @@ -46,13 +46,13 @@ from __future__ import unicode_literals from . import util from . 
import odict import re -try: +try: #pragma: no cover from urllib.parse import urlparse, urlunparse -except ImportError: +except ImportError: #pragma: no cover from urlparse import urlparse, urlunparse -try: +try: #pragma: no cover from html import entities -except ImportError: +except ImportError: #pragma: no cover import htmlentitydefs as entities @@ -75,7 +75,8 @@ def build_inlinepatterns(md_instance, **kwargs): inlinePatterns["html"] = HtmlPattern(HTML_RE, md_instance) inlinePatterns["entity"] = HtmlPattern(ENTITY_RE, md_instance) inlinePatterns["not_strong"] = SimpleTextPattern(NOT_STRONG_RE) - inlinePatterns["strong_em"] = DoubleTagPattern(STRONG_EM_RE, 'strong,em') + inlinePatterns["em_strong"] = DoubleTagPattern(EM_STRONG_RE, 'strong,em') + inlinePatterns["strong_em"] = DoubleTagPattern(STRONG_EM_RE, 'em,strong') inlinePatterns["strong"] = SimpleTagPattern(STRONG_RE, 'strong') inlinePatterns["emphasis"] = SimpleTagPattern(EMPHASIS_RE, 'em') if md_instance.smart_emphasis: @@ -100,7 +101,8 @@ BACKTICK_RE = r'(?<!\\)(`+)(.+?)(?<!`)\2(?!`)' # `e=f()` or ``e=f("`")`` ESCAPE_RE = r'\\(.)' # \< EMPHASIS_RE = r'(\*)([^\*]+)\2' # *emphasis* STRONG_RE = r'(\*{2}|_{2})(.+?)\2' # **strong** -STRONG_EM_RE = r'(\*{3}|_{3})(.+?)\2' # ***strong*** +EM_STRONG_RE = r'(\*|_)\2{2}(.+?)\2(.*?)\2{2}' # ***strongem*** or ***em*strong** +STRONG_EM_RE = r'(\*|_)\2{2}(.+?)\2{2}(.*?)\2' # ***strong**em* SMART_EMPHASIS_RE = r'(?<!\w)(_)(?!_)(.+?)(?<!_)\2(?!\w)' # _smart_emphasis_ EMPHASIS_2_RE = r'(_)(.+?)\2' # _emphasis_ LINK_RE = NOIMG + BRK + \ @@ -156,7 +158,7 @@ class Pattern(object): """ self.pattern = pattern - self.compiled_re = re.compile("^(.*?)%s(.*?)$" % pattern, + self.compiled_re = re.compile("^(.*?)%s(.*?)$" % pattern, re.DOTALL | re.UNICODE) # Api for Markdown to pass safe_mode into instance @@ -178,7 +180,7 @@ class Pattern(object): * m: A re match object containing a match of the pattern. """ - pass + pass #pragma: no cover def type(self): """ Return class name, to define pattern type """ @@ -188,9 +190,9 @@ class Pattern(object): """ Return unescaped text given text with an inline placeholder. """ try: stash = self.markdown.treeprocessors['inline'].stashed_nodes - except KeyError: + except KeyError: #pragma: no cover return text - def itertext(el): + def itertext(el): #pragma: no cover ' Reimplement Element.itertext for older python versions ' tag = el.tag if not isinstance(tag, util.string_type) and tag is not None: @@ -210,17 +212,14 @@ class Pattern(object): return value else: # An etree Element - return text content only - return ''.join(itertext(value)) + return ''.join(itertext(value)) return util.INLINE_PLACEHOLDER_RE.sub(get_stash, text) class SimpleTextPattern(Pattern): """ Return a simple text of group(2) of a Pattern. """ def handleMatch(self, m): - text = m.group(2) - if text == util.INLINE_PLACEHOLDER_PREFIX: - return None - return text + return m.group(2) class EscapePattern(Pattern): @@ -231,7 +230,7 @@ class EscapePattern(Pattern): if char in self.markdown.ESCAPED_CHARS: return '%s%s%s' % (util.STX, ord(char), util.ETX) else: - return None + return None class SimpleTagPattern(Pattern): @@ -279,6 +278,8 @@ class DoubleTagPattern(SimpleTagPattern): el1 = util.etree.Element(tag1) el2 = util.etree.SubElement(el1, tag2) el2.text = m.group(3) + if len(m.groups())==5: + el2.tail = m.group(4) return el1 @@ -293,7 +294,7 @@ class HtmlPattern(Pattern): """ Return unescaped text given text with an inline placeholder. 
""" try: stash = self.markdown.treeprocessors['inline'].stashed_nodes - except KeyError: + except KeyError: #pragma: no cover return text def get_stash(m): id = m.group(1) @@ -303,7 +304,7 @@ class HtmlPattern(Pattern): return self.markdown.serializer(value) except: return '\%s' % value - + return util.INLINE_PLACEHOLDER_RE.sub(get_stash, text) @@ -323,7 +324,7 @@ class LinkPattern(Pattern): el.set("href", "") if title: - title = dequote(self.unescape(title)) + title = dequote(self.unescape(title)) el.set("title", title) return el @@ -347,20 +348,20 @@ class LinkPattern(Pattern): if not self.markdown.safeMode: # Return immediately bipassing parsing. return url - + try: scheme, netloc, path, params, query, fragment = url = urlparse(url) - except ValueError: + except ValueError: #pragma: no cover # Bad url - so bad it couldn't be parsed. return '' - + locless_schemes = ['', 'mailto', 'news'] allowed_schemes = locless_schemes + ['http', 'https', 'ftp', 'ftps'] if scheme not in allowed_schemes: # Not a known (allowed) scheme. Not safe. return '' - - if netloc == '' and scheme not in locless_schemes: + + if netloc == '' and scheme not in locless_schemes: #pragma: no cover # This should not happen. Treat as suspect. return '' diff --git a/awx/lib/site-packages/markdown/odict.py b/awx/lib/site-packages/markdown/odict.py index 68c12593f7..b158e06a24 100644 --- a/awx/lib/site-packages/markdown/odict.py +++ b/awx/lib/site-packages/markdown/odict.py @@ -82,11 +82,11 @@ class OrderedDict(dict): for key in self.keyOrder: yield self[key] - if util.PY3: + if util.PY3: #pragma: no cover items = _iteritems keys = _iterkeys values = _itervalues - else: + else: #pragma: no cover iteritems = _iteritems iterkeys = _iterkeys itervalues = _itervalues diff --git a/awx/lib/site-packages/markdown/postprocessors.py b/awx/lib/site-packages/markdown/postprocessors.py index 5f3f032c15..7b568adac5 100644 --- a/awx/lib/site-packages/markdown/postprocessors.py +++ b/awx/lib/site-packages/markdown/postprocessors.py @@ -42,7 +42,7 @@ class Postprocessor(util.Processor): (possibly modified) string. """ - pass + pass #pragma: no cover class RawHtmlPostprocessor(Postprocessor): diff --git a/awx/lib/site-packages/markdown/preprocessors.py b/awx/lib/site-packages/markdown/preprocessors.py index 5bfca55530..ed11c39031 100644 --- a/awx/lib/site-packages/markdown/preprocessors.py +++ b/awx/lib/site-packages/markdown/preprocessors.py @@ -41,7 +41,7 @@ class Preprocessor(util.Processor): the (possibly modified) list of lines. """ - pass + pass #pragma: no cover class NormalizeWhitespace(Preprocessor): @@ -174,9 +174,10 @@ class HtmlBlockPreprocessor(Preprocessor): else: # raw html if len(items) - right_listindex <= 1: # last element right_listindex -= 1 + offset = 1 if i == right_listindex else 0 placeholder = self.markdown.htmlStash.store('\n\n'.join( - items[i:right_listindex + 1])) - del items[i:right_listindex + 1] + items[i:right_listindex + offset])) + del items[i:right_listindex + offset] items.insert(i, placeholder) return items diff --git a/awx/lib/site-packages/markdown/serializers.py b/awx/lib/site-packages/markdown/serializers.py index aa828066b4..f53ae31db8 100644 --- a/awx/lib/site-packages/markdown/serializers.py +++ b/awx/lib/site-packages/markdown/serializers.py @@ -42,9 +42,9 @@ from __future__ import unicode_literals from . 
import util ElementTree = util.etree.ElementTree QName = util.etree.QName -if hasattr(util.etree, 'test_comment'): +if hasattr(util.etree, 'test_comment'): #pragma: no cover Comment = util.etree.test_comment -else: +else: #pragma: no cover Comment = util.etree.Comment PI = util.etree.PI ProcessingInstruction = util.etree.ProcessingInstruction @@ -56,7 +56,7 @@ HTML_EMPTY = ("area", "base", "basefont", "br", "col", "frame", "hr", try: HTML_EMPTY = set(HTML_EMPTY) -except NameError: +except NameError: #pragma: no cover pass _namespace_map = { @@ -73,7 +73,7 @@ _namespace_map = { } -def _raise_serialization_error(text): +def _raise_serialization_error(text): #pragma: no cover raise TypeError( "cannot serialize %r (type %s)" % (text, type(text).__name__) ) @@ -81,7 +81,7 @@ def _raise_serialization_error(text): def _encode(text, encoding): try: return text.encode(encoding, "xmlcharrefreplace") - except (TypeError, AttributeError): + except (TypeError, AttributeError): #pragma: no cover _raise_serialization_error(text) def _escape_cdata(text): @@ -97,7 +97,7 @@ def _escape_cdata(text): if ">" in text: text = text.replace(">", ">") return text - except (TypeError, AttributeError): + except (TypeError, AttributeError): #pragma: no cover _raise_serialization_error(text) @@ -115,7 +115,7 @@ def _escape_attrib(text): if "\n" in text: text = text.replace("\n", " ") return text - except (TypeError, AttributeError): + except (TypeError, AttributeError): #pragma: no cover _raise_serialization_error(text) def _escape_attrib_html(text): @@ -130,7 +130,7 @@ def _escape_attrib_html(text): if "\"" in text: text = text.replace("\"", """) return text - except (TypeError, AttributeError): + except (TypeError, AttributeError): #pragma: no cover _raise_serialization_error(text) @@ -240,7 +240,7 @@ def _namespaces(elem, default_namespace=None): "default_namespace option" ) qnames[qname] = qname - except TypeError: + except TypeError: #pragma: no cover _raise_serialization_error(qname) # populate qname and namespaces table diff --git a/awx/lib/site-packages/markdown/treeprocessors.py b/awx/lib/site-packages/markdown/treeprocessors.py index ef0a2aa00c..303e4600cd 100644 --- a/awx/lib/site-packages/markdown/treeprocessors.py +++ b/awx/lib/site-packages/markdown/treeprocessors.py @@ -34,11 +34,11 @@ class Treeprocessor(util.Processor): def run(self, root): """ Subclasses of Treeprocessor should implement a `run` method, which - takes a root ElementTree. This method can return another ElementTree - object, and the existing root ElementTree will be replaced, or it can + takes a root ElementTree. This method can return another ElementTree + object, and the existing root ElementTree will be replaced, or it can modify the current tree and return None. """ - pass + pass #pragma: no cover class InlineProcessor(Treeprocessor): @@ -53,6 +53,7 @@ class InlineProcessor(Treeprocessor): + len(self.__placeholder_suffix) self.__placeholder_re = util.INLINE_PLACEHOLDER_RE self.markdown = md + self.inlinePatterns = md.inlinePatterns def __makePlaceholder(self, type): """ Generate a placeholder """ @@ -70,7 +71,7 @@ class InlineProcessor(Treeprocessor): * index: index, from which we start search Returns: placeholder id and string index, after the found placeholder. 
- + """ m = self.__placeholder_re.search(data, index) if m: @@ -99,9 +100,9 @@ class InlineProcessor(Treeprocessor): """ if not isinstance(data, util.AtomicString): startIndex = 0 - while patternIndex < len(self.markdown.inlinePatterns): + while patternIndex < len(self.inlinePatterns): data, matched, startIndex = self.__applyPattern( - self.markdown.inlinePatterns.value_for_index(patternIndex), + self.inlinePatterns.value_for_index(patternIndex), data, patternIndex, startIndex) if not matched: patternIndex += 1 @@ -128,11 +129,10 @@ class InlineProcessor(Treeprocessor): text = subnode.tail subnode.tail = None - childResult = self.__processPlaceholders(text, subnode) + childResult = self.__processPlaceholders(text, subnode, isText) if not isText and node is not subnode: - pos = list(node).index(subnode) - node.remove(subnode) + pos = list(node).index(subnode) + 1 else: pos = 0 @@ -140,7 +140,7 @@ class InlineProcessor(Treeprocessor): for newChild in childResult: node.insert(pos, newChild) - def __processPlaceholders(self, data, parent): + def __processPlaceholders(self, data, parent, isText=True): """ Process string with placeholders and generate ElementTree tree. @@ -150,7 +150,7 @@ class InlineProcessor(Treeprocessor): * parent: Element, which contains processing inline data Returns: list with ElementTree elements with applied inline patterns. - + """ def linkText(text): if text: @@ -159,6 +159,11 @@ class InlineProcessor(Treeprocessor): result[-1].tail += text else: result[-1].tail = text + elif not isText: + if parent.tail: + parent.tail += text + else: + parent.tail = text else: if parent.text: parent.text += text @@ -182,7 +187,7 @@ class InlineProcessor(Treeprocessor): for child in [node] + list(node): if child.tail: if child.tail.strip(): - self.__processElementText(node, child,False) + self.__processElementText(node, child, False) if child.text: if child.text.strip(): self.__processElementText(child, child) @@ -239,7 +244,7 @@ class InlineProcessor(Treeprocessor): # We need to process current node too for child in [node] + list(node): if not isString(node): - if child.text: + if child.text: child.text = self.__handleInline(child.text, patternIndex + 1) if child.tail: @@ -287,11 +292,10 @@ class InlineProcessor(Treeprocessor): if child.tail: tail = self.__handleInline(child.tail) dumby = util.etree.Element('d') - tailResult = self.__processPlaceholders(tail, dumby) - if dumby.text: - child.tail = dumby.text - else: - child.tail = None + child.tail = None + tailResult = self.__processPlaceholders(tail, dumby, False) + if dumby.tail: + child.tail = dumby.tail pos = list(currElement).index(child) + 1 tailResult.reverse() for newChild in tailResult: @@ -303,7 +307,7 @@ class InlineProcessor(Treeprocessor): if self.markdown.enable_attributes: if element.text and isString(element.text): element.text = \ - inlinepatterns.handleAttributes(element.text, + inlinepatterns.handleAttributes(element.text, element) i = 0 for newChild in lst: @@ -357,4 +361,4 @@ class PrettifyTreeprocessor(Treeprocessor): pres = root.getiterator('pre') for pre in pres: if len(pre) and pre[0].tag == 'code': - pre[0].text = pre[0].text.rstrip() + '\n' + pre[0].text = util.AtomicString(pre[0].text.rstrip() + '\n') diff --git a/awx/lib/site-packages/markdown/util.py b/awx/lib/site-packages/markdown/util.py index edb25886ad..0541e7b409 100644 --- a/awx/lib/site-packages/markdown/util.py +++ b/awx/lib/site-packages/markdown/util.py @@ -10,11 +10,11 @@ Python 3 Stuff """ PY3 = sys.version_info[0] == 3 -if PY3: +if PY3: 
#pragma: no cover string_type = str text_type = str int2str = chr -else: +else: #pragma: no cover string_type = basestring text_type = unicode int2str = unichr @@ -58,14 +58,15 @@ RTL_BIDI_RANGES = ( ('\u0590', '\u07FF'), # Extensions should use "markdown.util.etree" instead of "etree" (or do `from # markdown.util import etree`). Do not import it by yourself. -try: # Is the C implementation of ElementTree available? +try: #pragma: no cover + # Is the C implementation of ElementTree available? import xml.etree.cElementTree as etree from xml.etree.ElementTree import Comment # Serializers (including ours) test with non-c Comment etree.test_comment = Comment if etree.VERSION < "1.0.5": raise RuntimeError("cElementTree version 1.0.5 or higher is required.") -except (ImportError, RuntimeError): +except (ImportError, RuntimeError): #pragma: no cover # Use the Python implementation of ElementTree? import xml.etree.ElementTree as etree if etree.VERSION < "1.1": @@ -85,15 +86,20 @@ def isBlockLevel(tag): # Some ElementTree tags are not strings, so return False. return False -def parseBoolValue(value, fail_on_errors=True): +def parseBoolValue(value, fail_on_errors=True, preserve_none=False): """Parses a string representing bool value. If parsing was successful, - returns True or False. If parsing was not successful, raises - ValueError, or, if fail_on_errors=False, returns None.""" + returns True or False. If preserve_none=True, returns True, False, + or None. If parsing was not successful, raises ValueError, or, if + fail_on_errors=False, returns None.""" if not isinstance(value, string_type): + if preserve_none and value is None: + return value return bool(value) + elif preserve_none and value.lower() == 'none': + return None elif value.lower() in ('true', 'yes', 'y', 'on', '1'): return True - elif value.lower() in ('false', 'no', 'n', 'off', '0'): + elif value.lower() in ('false', 'no', 'n', 'off', '0', 'none'): return False elif fail_on_errors: raise ValueError('Cannot parse bool value: %r' % value) diff --git a/awx/lib/site-packages/pkg_resources.py b/awx/lib/site-packages/pkg_resources/__init__.py similarity index 74% rename from awx/lib/site-packages/pkg_resources.py rename to awx/lib/site-packages/pkg_resources/__init__.py index bde30989fe..c0c095b253 100644 --- a/awx/lib/site-packages/pkg_resources.py +++ b/awx/lib/site-packages/pkg_resources/__init__.py @@ -14,8 +14,11 @@ The package resource API is designed to work with normal filesystem packages, method. 
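The `parseBoolValue()` change in markdown/util.py above adds a `preserve_none` mode; its behavior, read straight from the new code:

```python
from markdown.util import parseBoolValue

assert parseBoolValue('yes') is True
assert parseBoolValue('off') is False
# 'none' now parses as False by default...
assert parseBoolValue('none') is False
# ...but survives as None when explicitly preserved.
assert parseBoolValue('none', preserve_none=True) is None
```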
""" +from __future__ import absolute_import + import sys import os +import io import time import re import imp @@ -29,30 +32,27 @@ import token import symbol import operator import platform +import collections +import plistlib +import email.parser +import tempfile from pkgutil import get_importer -try: - from urlparse import urlparse, urlunparse -except ImportError: +PY3 = sys.version_info > (3,) +PY2 = not PY3 + +if PY3: from urllib.parse import urlparse, urlunparse -try: - frozenset -except NameError: - from sets import ImmutableSet as frozenset -try: - basestring - next = lambda o: o.next() - from cStringIO import StringIO as BytesIO -except NameError: - basestring = str - from io import BytesIO - def execfile(fn, globs=None, locs=None): - if globs is None: - globs = globals() - if locs is None: - locs = globs - exec(compile(open(fn).read(), fn, 'exec'), globs, locs) +if PY2: + from urlparse import urlparse, urlunparse + +if PY3: + string_types = str, +else: + string_types = str, eval('unicode') + +iteritems = (lambda i: i.items()) if PY3 else lambda i: i.iteritems() # capture these to bypass sandboxing from os import utime @@ -77,23 +77,142 @@ try: except ImportError: pass -def _bypass_ensure_directory(name, mode=0x1FF): # 0777 - # Sandbox-bypassing version of ensure_directory() - if not WRITE_SUPPORT: - raise IOError('"os.mkdir" not supported on this platform.') - dirname, filename = split(name) - if dirname and filename and not isdir(dirname): - _bypass_ensure_directory(dirname) - mkdir(dirname, mode) +try: + import pkg_resources._vendor.packaging.version + import pkg_resources._vendor.packaging.specifiers + packaging = pkg_resources._vendor.packaging +except ImportError: + # fallback to naturally-installed version; allows system packagers to + # omit vendored packages. + import packaging.version + import packaging.specifiers + + +class PEP440Warning(RuntimeWarning): + """ + Used when there is an issue with a version or specifier not complying with + PEP 440. 
+ """ + + +class _SetuptoolsVersionMixin(object): + + def __hash__(self): + return super(_SetuptoolsVersionMixin, self).__hash__() + + def __lt__(self, other): + if isinstance(other, tuple): + return tuple(self) < other + else: + return super(_SetuptoolsVersionMixin, self).__lt__(other) + + def __le__(self, other): + if isinstance(other, tuple): + return tuple(self) <= other + else: + return super(_SetuptoolsVersionMixin, self).__le__(other) + + def __eq__(self, other): + if isinstance(other, tuple): + return tuple(self) == other + else: + return super(_SetuptoolsVersionMixin, self).__eq__(other) + + def __ge__(self, other): + if isinstance(other, tuple): + return tuple(self) >= other + else: + return super(_SetuptoolsVersionMixin, self).__ge__(other) + + def __gt__(self, other): + if isinstance(other, tuple): + return tuple(self) > other + else: + return super(_SetuptoolsVersionMixin, self).__gt__(other) + + def __ne__(self, other): + if isinstance(other, tuple): + return tuple(self) != other + else: + return super(_SetuptoolsVersionMixin, self).__ne__(other) + + def __getitem__(self, key): + return tuple(self)[key] + + def __iter__(self): + component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE) + replace = { + 'pre': 'c', + 'preview': 'c', + '-': 'final-', + 'rc': 'c', + 'dev': '@', + }.get + + def _parse_version_parts(s): + for part in component_re.split(s): + part = replace(part, part) + if not part or part == '.': + continue + if part[:1] in '0123456789': + # pad for numeric comparison + yield part.zfill(8) + else: + yield '*'+part + + # ensure that alpha/beta/candidate are before final + yield '*final' + + def old_parse_version(s): + parts = [] + for part in _parse_version_parts(s.lower()): + if part.startswith('*'): + # remove '-' before a prerelease tag + if part < '*final': + while parts and parts[-1] == '*final-': + parts.pop() + # remove trailing zeros from each series of numeric parts + while parts and parts[-1] == '00000000': + parts.pop() + parts.append(part) + return tuple(parts) + + # Warn for use of this function + warnings.warn( + "You have iterated over the result of " + "pkg_resources.parse_version. This is a legacy behavior which is " + "inconsistent with the new version class introduced in setuptools " + "8.0. 
That class should be used directly instead of attempting to " + "iterate over the result.", + RuntimeWarning, + stacklevel=1, + ) + + for part in old_parse_version(str(self)): + yield part + + +class SetuptoolsVersion(_SetuptoolsVersionMixin, packaging.version.Version): + pass + + +class SetuptoolsLegacyVersion(_SetuptoolsVersionMixin, + packaging.version.LegacyVersion): + pass + + +def parse_version(v): + try: + return SetuptoolsVersion(v) + except packaging.version.InvalidVersion: + return SetuptoolsLegacyVersion(v) _state_vars = {} def _declare_state(vartype, **kw): - g = globals() - for name, val in kw.items(): - g[name] = val - _state_vars[name] = vartype + globals().update(kw) + _state_vars.update(dict.fromkeys(kw, vartype)) def __getstate__(): state = {} @@ -143,13 +262,15 @@ def get_supported_platform(): try: plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3)) except ValueError: - pass # not Mac OS X + # not Mac OS X + pass return plat __all__ = [ # Basic resource access and distribution/entry point discovery 'require', 'run_script', 'get_provider', 'get_distribution', - 'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points', + 'load_entry_point', 'get_entry_map', 'get_entry_info', + 'iter_entry_points', 'resource_string', 'resource_stream', 'resource_filename', 'resource_listdir', 'resource_exists', 'resource_isdir', @@ -163,8 +284,11 @@ __all__ = [ 'Distribution', 'Requirement', 'EntryPoint', # Exceptions - 'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra', - 'ExtractionError', + 'ResolutionError', 'VersionConflict', 'DistributionNotFound', + 'UnknownExtra', 'ExtractionError', + + # Warnings + 'PEP440Warning', # Parsing functions and string utilities 'parse_requirements', 'parse_version', 'safe_name', 'safe_version', @@ -193,8 +317,51 @@ class ResolutionError(Exception): def __repr__(self): return self.__class__.__name__+repr(self.args) + class VersionConflict(ResolutionError): - """An already-installed version conflicts with the requested version""" + """ + An already-installed version conflicts with the requested version. + + Should be initialized with the installed Distribution and the requested + Requirement. + """ + + _template = "{self.dist} is installed but {self.req} is required" + + @property + def dist(self): + return self.args[0] + + @property + def req(self): + return self.args[1] + + def report(self): + return self._template.format(**locals()) + + def with_context(self, required_by): + """ + If required_by is non-empty, return a version of self that is a + ContextualVersionConflict. + """ + if not required_by: + return self + args = self.args + (required_by,) + return ContextualVersionConflict(*args) + + +class ContextualVersionConflict(VersionConflict): + """ + A VersionConflict that accepts a third parameter, the set of the + requirements that required the installed Distribution. 
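The richer `VersionConflict` above can name the dependents that pulled in a requirement. A hypothetical sketch ('six>=99.0' is a deliberately unsatisfiable requirement, assuming an older six is active in the working set):

```python
import pkg_resources

try:
    pkg_resources.require('six>=99.0')
except pkg_resources.VersionConflict as exc:
    # report() formats "<dist> is installed but <req> is required";
    # a ContextualVersionConflict additionally carries required_by.
    print(exc.report())
```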
+ """ + + _template = VersionConflict._template + ' by {self.required_by}' + + @property + def required_by(self): + return self.args[2] + class DistributionNotFound(ResolutionError): """A requested distribution was not found""" @@ -221,7 +388,7 @@ def register_loader_type(loader_type, provider_factory): def get_provider(moduleOrReq): """Return an IResourceProvider for the named module or requirement""" - if isinstance(moduleOrReq,Requirement): + if isinstance(moduleOrReq, Requirement): return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0] try: module = sys.modules[moduleOrReq] @@ -233,11 +400,9 @@ def get_provider(moduleOrReq): def _macosx_vers(_cache=[]): if not _cache: - import platform version = platform.mac_ver()[0] # fallback for MacPorts if version == '': - import plistlib plist = '/System/Library/CoreServices/SystemVersion.plist' if os.path.exists(plist): if hasattr(plistlib, 'readPlist'): @@ -249,7 +414,7 @@ def _macosx_vers(_cache=[]): return _cache[0] def _macosx_arch(machine): - return {'PowerPC':'ppc', 'Power_Macintosh':'ppc'}.get(machine,machine) + return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine) def get_build_platform(): """Return this platform's string for platform-specific distributions @@ -278,10 +443,11 @@ def get_build_platform(): macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)") darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)") -get_platform = get_build_platform # XXX backward compat +# XXX backward compat +get_platform = get_build_platform -def compatible_platforms(provided,required): +def compatible_platforms(provided, required): """Can code for the `provided` platform run on the `required` platform? Returns true if either platform is ``None``, or the platforms are equal. @@ -289,7 +455,8 @@ def compatible_platforms(provided,required): XXX Needs compatibility checks for Linux and other unixy OSes. """ if provided is None or required is None or provided==required: - return True # easy case + # easy case + return True # Mac OS X special cases reqMac = macosVersionString.match(required) @@ -307,13 +474,9 @@ def compatible_platforms(provided,required): macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) if dversion == 7 and macosversion >= "10.3" or \ dversion == 8 and macosversion >= "10.4": - - #import warnings - #warnings.warn("Mac eggs should be rebuilt to " - # "use the macosx designation instead of darwin.", - # category=DeprecationWarning) return True - return False # egg isn't macosx or legacy darwin + # egg isn't macosx or legacy darwin + return False # are they the same major version and machine type? 
if provMac.group(1) != reqMac.group(1) or \ @@ -338,13 +501,16 @@ def run_script(dist_spec, script_name): ns['__name__'] = name require(dist_spec)[0].run_script(script_name, ns) -run_main = run_script # backward compatibility +# backward compatibility +run_main = run_script def get_distribution(dist): """Return a current distribution object for a Requirement or string""" - if isinstance(dist,basestring): dist = Requirement.parse(dist) - if isinstance(dist,Requirement): dist = get_provider(dist) - if not isinstance(dist,Distribution): + if isinstance(dist, string_types): + dist = Requirement.parse(dist) + if isinstance(dist, Requirement): + dist = get_provider(dist) + if not isinstance(dist, Distribution): raise TypeError("Expected string, Requirement, or Distribution", dist) return dist @@ -429,6 +595,48 @@ class WorkingSet(object): for entry in entries: self.add_entry(entry) + @classmethod + def _build_master(cls): + """ + Prepare the master working set. + """ + ws = cls() + try: + from __main__ import __requires__ + except ImportError: + # The main program does not list any requirements + return ws + + # ensure the requirements are met + try: + ws.require(__requires__) + except VersionConflict: + return cls._build_from_requirements(__requires__) + + return ws + + @classmethod + def _build_from_requirements(cls, req_spec): + """ + Build a working set from a requirement spec. Rewrites sys.path. + """ + # try it without defaults already on sys.path + # by starting with an empty path + ws = cls([]) + reqs = parse_requirements(req_spec) + dists = ws.resolve(reqs, Environment()) + for dist in dists: + ws.add(dist) + + # add any missing entries from sys.path + for entry in sys.path: + if entry not in ws.entries: + ws.add_entry(entry) + + # then copy back to sys.path + sys.path[:] = ws.entries + return ws + def add_entry(self, entry): """Add a path item to ``.entries``, finding any distributions on it @@ -444,7 +652,7 @@ class WorkingSet(object): for dist in find_distributions(entry, True): self.add(dist, entry, False) - def __contains__(self,dist): + def __contains__(self, dist): """True if `dist` is the active distribution for its project""" return self.by_key.get(dist.key) == dist @@ -460,9 +668,9 @@ class WorkingSet(object): """ dist = self.by_key.get(req.key) if dist is not None and dist not in req: - raise VersionConflict(dist,req) # XXX add more info - else: - return dist + # XXX add more info + raise VersionConflict(dist, req) + return dist def iter_entry_points(self, group, name=None): """Yield entry point objects from `group` matching `name` @@ -524,7 +732,8 @@ class WorkingSet(object): keys = self.entry_keys.setdefault(entry,[]) keys2 = self.entry_keys.setdefault(dist.location,[]) if not replace and dist.key in self.by_key: - return # ignore hidden distros + # ignore hidden distros + return self.by_key[dist.key] = dist if dist.key not in keys: @@ -552,13 +761,21 @@ class WorkingSet(object): it. """ - requirements = list(requirements)[::-1] # set up the stack - processed = {} # set of processed requirements - best = {} # key -> dist + # set up the stack + requirements = list(requirements)[::-1] + # set of processed requirements + processed = {} + # key -> dist + best = {} to_activate = [] + # Mapping of requirement to set of distributions that required it; + # useful for reporting info about conflicts. 
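`WorkingSet.resolve()` walks requirements breadth-first and now records, via `required_by`, which project asked for each requirement. A hedged sketch ('Markdown>=2.5' is a hypothetical requirement):

```python
import pkg_resources

ws = pkg_resources.working_set
reqs = pkg_resources.parse_requirements('Markdown>=2.5')
# Returns the distributions to activate, or raises
# DistributionNotFound / (Contextual)VersionConflict.
dists = ws.resolve(reqs)
print([d.project_name for d in dists])
```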
+ required_by = collections.defaultdict(set) + while requirements: - req = requirements.pop(0) # process dependencies breadth-first + # process dependencies breadth-first + req = requirements.pop(0) if req in processed: # Ignore cyclic or redundant dependencies continue @@ -589,11 +806,21 @@ class WorkingSet(object): to_activate.append(dist) if dist not in req: # Oops, the "best" so far conflicts with a dependency - raise VersionConflict(dist,req) # XXX put more info here - requirements.extend(dist.requires(req.extras)[::-1]) + dependent_req = required_by[req] + raise VersionConflict(dist, req).with_context(dependent_req) + + # push the new requirements onto the stack + new_requirements = dist.requires(req.extras)[::-1] + requirements.extend(new_requirements) + + # Register the new requirements needed by req + for new_requirement in new_requirements: + required_by[new_requirement].add(req.project_name) + processed[req] = True - return to_activate # return list of distros to activate + # return list of distros to activate + return to_activate def find_plugins(self, plugin_env, full_env=None, installer=None, fallback=True): @@ -604,8 +831,10 @@ class WorkingSet(object): distributions, errors = working_set.find_plugins( Environment(plugin_dirlist) ) - map(working_set.add, distributions) # add plugins+libs to sys.path - print 'Could not load', errors # display errors + # add plugins+libs to sys.path + map(working_set.add, distributions) + # display errors + print('Could not load', errors) The `plugin_env` should be an ``Environment`` instance that contains only distributions that are in the project's "plugin directory" or @@ -630,7 +859,8 @@ class WorkingSet(object): """ plugin_projects = list(plugin_env) - plugin_projects.sort() # scan project names in alphabetic order + # scan project names in alphabetic order + plugin_projects.sort() error_info = {} distributions = {} @@ -642,7 +872,8 @@ class WorkingSet(object): env = full_env + plugin_env shadow_set = self.__class__([]) - list(map(shadow_set.add, self)) # put all our entries in shadow_set + # put all our entries in shadow_set + list(map(shadow_set.add, self)) for project_name in plugin_projects: @@ -653,13 +884,15 @@ class WorkingSet(object): try: resolvees = shadow_set.resolve(req, env, installer) - except ResolutionError: - v = sys.exc_info()[1] - error_info[dist] = v # save error info + except ResolutionError as v: + # save error info + error_info[dist] = v if fallback: - continue # try the next older version of project + # try the next older version of project + continue else: - break # give up on this project, keep going + # give up on this project, keep going + break else: list(map(shadow_set.add, resolvees)) @@ -718,7 +951,8 @@ class WorkingSet(object): class Environment(object): """Searchable snapshot of distributions on a search path""" - def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR): + def __init__(self, search_path=None, platform=get_supported_platform(), + python=PY_MAJOR): """Snapshot distributions available on a search path Any distributions found on `search_path` are added to the environment. @@ -736,7 +970,6 @@ class Environment(object): running platform or Python version. 
""" self._distmap = {} - self._cache = {} self.platform = platform self.python = python self.scan(search_path) @@ -750,7 +983,7 @@ class Environment(object): """ return (self.python is None or dist.py_version is None or dist.py_version==self.python) \ - and compatible_platforms(dist.platform,self.platform) + and compatible_platforms(dist.platform, self.platform) def remove(self, dist): """Remove `dist` from the environment""" @@ -771,30 +1004,25 @@ class Environment(object): for dist in find_distributions(item): self.add(dist) - def __getitem__(self,project_name): + def __getitem__(self, project_name): """Return a newest-to-oldest list of distributions for `project_name` + + Uses case-insensitive `project_name` comparison, assuming all the + project's distributions use their project's name converted to all + lowercase as their key. + """ - try: - return self._cache[project_name] - except KeyError: - project_name = project_name.lower() - if project_name not in self._distmap: - return [] + distribution_key = project_name.lower() + return self._distmap.get(distribution_key, []) - if project_name not in self._cache: - dists = self._cache[project_name] = self._distmap[project_name] - _sort_dists(dists) - - return self._cache[project_name] - - def add(self,dist): - """Add `dist` if we ``can_add()`` it and it isn't already added""" + def add(self, dist): + """Add `dist` if we ``can_add()`` it and it has not already been added + """ if self.can_add(dist) and dist.has_version(): - dists = self._distmap.setdefault(dist.key,[]) + dists = self._distmap.setdefault(dist.key, []) if dist not in dists: dists.append(dist) - if dist.key in self._cache: - _sort_dists(self._cache[dist.key]) + dists.sort(key=operator.attrgetter('hashcmp'), reverse=True) def best_match(self, req, working_set, installer=None): """Find distribution best matching `req` and usable on `working_set` @@ -815,7 +1043,8 @@ class Environment(object): for dist in self[req.key]: if dist in req: return dist - return self.obtain(req, installer) # try and download/install + # try to download/install + return self.obtain(req, installer) def obtain(self, requirement, installer=None): """Obtain a distribution matching `requirement` (e.g. via download) @@ -832,13 +1061,14 @@ class Environment(object): def __iter__(self): """Yield the unique project names of the available distributions""" for key in self._distmap.keys(): - if self[key]: yield key + if self[key]: + yield key def __iadd__(self, other): """In-place addition of a distribution or environment""" - if isinstance(other,Distribution): + if isinstance(other, Distribution): self.add(other) - elif isinstance(other,Environment): + elif isinstance(other, Environment): for project in other: for dist in other[project]: self.add(dist) @@ -854,7 +1084,8 @@ class Environment(object): return new -AvailableDistributions = Environment # XXX backward compatibility +# XXX backward compatibility +AvailableDistributions = Environment class ExtractionError(RuntimeError): @@ -1007,7 +1238,7 @@ variable to point to an accessible directory. if os.name == 'posix': # Make the resource executable - mode = ((os.stat(tempname).st_mode) | 0x16D) & 0xFFF # 0555, 07777 + mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777 os.chmod(tempname, mode) def set_extraction_path(self, path): @@ -1064,14 +1295,17 @@ def get_default_cache(): if os.name!='nt': return os.path.expanduser('~/.python-eggs') - app_data = 'Application Data' # XXX this may be locale-specific! + # XXX this may be locale-specific! 
+ app_data = 'Application Data' app_homes = [ - (('APPDATA',), None), # best option, should be locale-safe + # best option, should be locale-safe + (('APPDATA',), None), (('USERPROFILE',), app_data), (('HOMEDRIVE','HOMEPATH'), app_data), (('HOMEPATH',), app_data), (('HOME',), None), - (('WINDIR',), app_data), # 95/98/ME + # 95/98/ME + (('WINDIR',), app_data), ] for keys, subdir in app_homes: @@ -1083,7 +1317,7 @@ def get_default_cache(): break else: if subdir: - dirname = os.path.join(dirname,subdir) + dirname = os.path.join(dirname, subdir) return os.path.join(dirname, 'Python-Eggs') else: raise RuntimeError( @@ -1099,13 +1333,15 @@ def safe_name(name): def safe_version(version): - """Convert an arbitrary string to a standard version string - - Spaces become dots, and all other non-alphanumeric characters become - dashes, with runs of multiple dashes condensed to a single dash. """ - version = version.replace(' ','.') - return re.sub('[^A-Za-z0-9.]+', '-', version) + Convert an arbitrary string to a standard version string + """ + try: + # normalize the version + return str(packaging.version.Version(version)) + except packaging.version.InvalidVersion: + version = version.replace(' ','.') + return re.sub('[^A-Za-z0-9.]+', '-', version) def safe_extra(extra): @@ -1129,8 +1365,8 @@ class MarkerEvaluation(object): values = { 'os_name': lambda: os.name, 'sys_platform': lambda: sys.platform, - 'python_full_version': lambda: sys.version.split()[0], - 'python_version': lambda:'%s.%s' % (sys.version_info[0], sys.version_info[1]), + 'python_full_version': platform.python_version, + 'python_version': lambda: platform.python_version()[:3], 'platform_version': platform.version, 'platform_machine': platform.machine, 'python_implementation': platform.python_implementation, @@ -1144,16 +1380,18 @@ class MarkerEvaluation(object): """ try: cls.evaluate_marker(text) - except SyntaxError: - return cls.normalize_exception(sys.exc_info()[1]) + except SyntaxError as e: + return cls.normalize_exception(e) return False @staticmethod def normalize_exception(exc): """ - Given a SyntaxError from a marker evaluation, normalize the error message: + Given a SyntaxError from a marker evaluation, normalize the error + message: - Remove indications of filename and line number. - - Replace platform-specific error messages with standard error messages. + - Replace platform-specific error messages with standard error + messages. """ subs = { 'unexpected EOF while parsing': 'invalid syntax', @@ -1167,12 +1405,20 @@ class MarkerEvaluation(object): @classmethod def and_test(cls, nodelist): # MUST NOT short-circuit evaluation, or invalid syntax can be skipped! - return functools.reduce(operator.and_, [cls.interpret(nodelist[i]) for i in range(1,len(nodelist),2)]) + items = [ + cls.interpret(nodelist[i]) + for i in range(1, len(nodelist), 2) + ] + return functools.reduce(operator.and_, items) @classmethod def test(cls, nodelist): # MUST NOT short-circuit evaluation, or invalid syntax can be skipped! 
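`safe_version()` now prefers PEP 440 normalization and only falls back to the historical dot/dash substitution when the string cannot be parsed; illustrative results:

```python
from pkg_resources import safe_version

print(safe_version('1.0-beta.2'))    # '1.0b2' via PEP 440 normalization
print(safe_version('peanut butter')) # fallback path: 'peanut.butter'
```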
- return functools.reduce(operator.or_, [cls.interpret(nodelist[i]) for i in range(1,len(nodelist),2)]) + items = [ + cls.interpret(nodelist[i]) + for i in range(1, len(nodelist), 2) + ] + return functools.reduce(operator.or_, items) @classmethod def atom(cls, nodelist): @@ -1181,12 +1427,14 @@ class MarkerEvaluation(object): if nodelist[2][0] == token.RPAR: raise SyntaxError("Empty parentheses") return cls.interpret(nodelist[2]) - raise SyntaxError("Language feature not supported in environment markers") + msg = "Language feature not supported in environment markers" + raise SyntaxError(msg) @classmethod def comparison(cls, nodelist): - if len(nodelist)>4: - raise SyntaxError("Chained comparison not allowed in environment markers") + if len(nodelist) > 4: + msg = "Chained comparison not allowed in environment markers" + raise SyntaxError(msg) comp = nodelist[2][1] cop = comp[1] if comp[0] == token.NAME: @@ -1198,7 +1446,8 @@ class MarkerEvaluation(object): try: cop = cls.get_op(cop) except KeyError: - raise SyntaxError(repr(cop)+" operator not allowed in environment markers") + msg = repr(cop) + " operator not allowed in environment markers" + raise SyntaxError(msg) return cop(cls.evaluate(nodelist[1]), cls.evaluate(nodelist[3])) @classmethod @@ -1224,7 +1473,8 @@ class MarkerEvaluation(object): Return a boolean indicating the marker result in this environment. Raise SyntaxError if marker is invalid. - This implementation uses the 'parser' module, which is not implemented on + This implementation uses the 'parser' module, which is not implemented + on Jython and has been superseded by the 'ast' module in Python 2.6 and later. """ @@ -1246,8 +1496,7 @@ class MarkerEvaluation(object): env[new_key] = env.pop(key) try: result = _markerlib.interpret(text, env) - except NameError: - e = sys.exc_info()[1] + except NameError as e: raise SyntaxError(e.args[0]) return result @@ -1278,12 +1527,21 @@ class MarkerEvaluation(object): return op() if kind==token.STRING: s = nodelist[1] - if s[:1] not in "'\"" or s.startswith('"""') or s.startswith("'''") \ - or '\\' in s: + if not cls._safe_string(s): raise SyntaxError( "Only plain strings allowed in environment markers") return s[1:-1] - raise SyntaxError("Language feature not supported in environment markers") + msg = "Language feature not supported in environment markers" + raise SyntaxError(msg) + + @staticmethod + def _safe_string(cand): + return ( + cand[:1] in "'\"" and + not cand.startswith('"""') and + not cand.startswith("'''") and + '\\' not in cand + ) invalid_marker = MarkerEvaluation.is_invalid_marker evaluate_marker = MarkerEvaluation.evaluate_marker @@ -1303,7 +1561,7 @@ class NullProvider: return self._fn(self.module_path, resource_name) def get_resource_stream(self, manager, resource_name): - return BytesIO(self.get_resource_string(manager, resource_name)) + return io.BytesIO(self.get_resource_string(manager, resource_name)) def get_resource_string(self, manager, resource_name): return self._get(self._fn(self.module_path, resource_name)) @@ -1312,52 +1570,54 @@ class NullProvider: return self._has(self._fn(self.module_path, resource_name)) def has_metadata(self, name): - return self.egg_info and self._has(self._fn(self.egg_info,name)) + return self.egg_info and self._has(self._fn(self.egg_info, name)) if sys.version_info <= (3,): def get_metadata(self, name): if not self.egg_info: return "" - return self._get(self._fn(self.egg_info,name)) + return self._get(self._fn(self.egg_info, name)) else: def get_metadata(self, name): if not 
self.egg_info: return "" - return self._get(self._fn(self.egg_info,name)).decode("utf-8") + return self._get(self._fn(self.egg_info, name)).decode("utf-8") def get_metadata_lines(self, name): return yield_lines(self.get_metadata(name)) - def resource_isdir(self,resource_name): + def resource_isdir(self, resource_name): return self._isdir(self._fn(self.module_path, resource_name)) - def metadata_isdir(self,name): - return self.egg_info and self._isdir(self._fn(self.egg_info,name)) + def metadata_isdir(self, name): + return self.egg_info and self._isdir(self._fn(self.egg_info, name)) - def resource_listdir(self,resource_name): - return self._listdir(self._fn(self.module_path,resource_name)) + def resource_listdir(self, resource_name): + return self._listdir(self._fn(self.module_path, resource_name)) - def metadata_listdir(self,name): + def metadata_listdir(self, name): if self.egg_info: - return self._listdir(self._fn(self.egg_info,name)) + return self._listdir(self._fn(self.egg_info, name)) return [] - def run_script(self,script_name,namespace): + def run_script(self, script_name, namespace): script = 'scripts/'+script_name if not self.has_metadata(script): raise ResolutionError("No script named %r" % script_name) - script_text = self.get_metadata(script).replace('\r\n','\n') - script_text = script_text.replace('\r','\n') - script_filename = self._fn(self.egg_info,script) + script_text = self.get_metadata(script).replace('\r\n', '\n') + script_text = script_text.replace('\r', '\n') + script_filename = self._fn(self.egg_info, script) namespace['__file__'] = script_filename if os.path.exists(script_filename): - execfile(script_filename, namespace, namespace) + source = open(script_filename).read() + code = compile(source, script_filename, 'exec') + exec(code, namespace, namespace) else: from linecache import cache cache[script_filename] = ( len(script_text), 0, script_text.split('\n'), script_filename ) - script_code = compile(script_text,script_filename,'exec') + script_code = compile(script_text, script_filename,'exec') exec(script_code, namespace, namespace) def _has(self, path): @@ -1393,8 +1653,8 @@ register_loader_type(object, NullProvider) class EggProvider(NullProvider): """Provider based on a virtual filesystem""" - def __init__(self,module): - NullProvider.__init__(self,module) + def __init__(self, module): + NullProvider.__init__(self, module) self._setup_prefix() def _setup_prefix(self): @@ -1417,21 +1677,18 @@ class DefaultProvider(EggProvider): def _has(self, path): return os.path.exists(path) - def _isdir(self,path): + def _isdir(self, path): return os.path.isdir(path) - def _listdir(self,path): + def _listdir(self, path): return os.listdir(path) def get_resource_stream(self, manager, resource_name): return open(self._fn(self.module_path, resource_name), 'rb') def _get(self, path): - stream = open(path, 'rb') - try: + with open(path, 'rb') as stream: return stream.read() - finally: - stream.close() register_loader_type(type(None), DefaultProvider) @@ -1442,9 +1699,9 @@ if importlib_bootstrap is not None: class EmptyProvider(NullProvider): """Provider that returns nothing for all requests""" - _isdir = _has = lambda self,path: False - _get = lambda self,path: '' - _listdir = lambda self,path: [] + _isdir = _has = lambda self, path: False + _get = lambda self, path: '' + _listdir = lambda self, path: [] module_path = None def __init__(self): @@ -1453,47 +1710,81 @@ class EmptyProvider(NullProvider): empty_provider = EmptyProvider() -def build_zipmanifest(path): +class 
ZipManifests(dict): + """ + zip manifest builder """ - This builds a similar dictionary to the zipimport directory - caches. However instead of tuples, ZipInfo objects are stored. - The translation of the tuple is as follows: - * [0] - zipinfo.filename on stock pythons this needs "/" --> os.sep - on pypy it is the same (one reason why distribute did work - in some cases on pypy and win32). - * [1] - zipinfo.compress_type - * [2] - zipinfo.compress_size - * [3] - zipinfo.file_size - * [4] - len(utf-8 encoding of filename) if zipinfo & 0x800 - len(ascii encoding of filename) otherwise - * [5] - (zipinfo.date_time[0] - 1980) << 9 | - zipinfo.date_time[1] << 5 | zipinfo.date_time[2] - * [6] - (zipinfo.date_time[3] - 1980) << 11 | - zipinfo.date_time[4] << 5 | (zipinfo.date_time[5] // 2) - * [7] - zipinfo.CRC + @classmethod + def build(cls, path): + """ + Build a dictionary similar to the zipimport directory + caches, except instead of tuples, store ZipInfo objects. + + Use a platform-specific path separator (os.sep) for the path keys + for compatibility with pypy on Windows. + """ + with ContextualZipFile(path) as zfile: + items = ( + ( + name.replace('/', os.sep), + zfile.getinfo(name), + ) + for name in zfile.namelist() + ) + return dict(items) + + load = build + + +class MemoizedZipManifests(ZipManifests): """ - zipinfo = dict() - zfile = zipfile.ZipFile(path) - #Got ZipFile has not __exit__ on python 3.1 - try: - for zitem in zfile.namelist(): - zpath = zitem.replace('/', os.sep) - zipinfo[zpath] = zfile.getinfo(zitem) - assert zipinfo[zpath] is not None - finally: - zfile.close() - return zipinfo + Memoized zipfile manifests. + """ + manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime') + + def load(self, path): + """ + Load a manifest at path or return a suitable manifest already loaded. + """ + path = os.path.normpath(path) + mtime = os.stat(path).st_mtime + + if path not in self or self[path].mtime != mtime: + manifest = self.build(path) + self[path] = self.manifest_mod(manifest, mtime) + + return self[path].manifest + + +class ContextualZipFile(zipfile.ZipFile): + """ + Supplement ZipFile class to support context manager for Python 2.6 + """ + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + self.close() + + def __new__(cls, *args, **kwargs): + """ + Construct a ZipFile or ContextualZipFile as appropriate + """ + if hasattr(zipfile.ZipFile, '__exit__'): + return zipfile.ZipFile(*args, **kwargs) + return super(ContextualZipFile, cls).__new__(cls) class ZipProvider(EggProvider): """Resource support for zips and eggs""" eagers = None + _zip_manifests = MemoizedZipManifests() def __init__(self, module): - EggProvider.__init__(self,module) - self.zipinfo = build_zipmanifest(self.loader.archive) + EggProvider.__init__(self, module) self.zip_pre = self.loader.archive+os.sep def _zipinfo_name(self, fspath): @@ -1502,18 +1793,23 @@ class ZipProvider(EggProvider): if fspath.startswith(self.zip_pre): return fspath[len(self.zip_pre):] raise AssertionError( - "%s is not a subpath of %s" % (fspath,self.zip_pre) + "%s is not a subpath of %s" % (fspath, self.zip_pre) ) - def _parts(self,zip_path): - # Convert a zipfile subpath into an egg-relative path part list - fspath = self.zip_pre+zip_path # pseudo-fs path + def _parts(self, zip_path): + # Convert a zipfile subpath into an egg-relative path part list. 
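# The memoization pattern above, reduced to a sketch (assumes Python 2.7+
# where ZipFile is already a context manager): the ZipInfo map for an
# archive is rebuilt only when the file's mtime changes.
import collections
import os
import zipfile

_entry = collections.namedtuple('_entry', 'manifest mtime')
_manifest_cache = {}

def load_manifest(path):
    path = os.path.normpath(path)
    mtime = os.stat(path).st_mtime
    cached = _manifest_cache.get(path)
    if cached is None or cached.mtime != mtime:
        with zipfile.ZipFile(path) as zf:
            # platform-specific separators in the keys, as above
            manifest = dict(
                (name.replace('/', os.sep), zf.getinfo(name))
                for name in zf.namelist()
            )
        _manifest_cache[path] = _entry(manifest, mtime)
    return _manifest_cache[path].manifest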
+ # pseudo-fs path + fspath = self.zip_pre+zip_path if fspath.startswith(self.egg_root+os.sep): return fspath[len(self.egg_root)+1:].split(os.sep) raise AssertionError( - "%s is not a subpath of %s" % (fspath,self.egg_root) + "%s is not a subpath of %s" % (fspath, self.egg_root) ) + @property + def zipinfo(self): + return self._zip_manifests.load(self.loader.archive) + def get_resource_filename(self, manager, resource_name): if not self.egg_name: raise NotImplementedError( @@ -1530,8 +1826,9 @@ class ZipProvider(EggProvider): @staticmethod def _get_date_and_size(zip_stat): size = zip_stat.file_size - date_time = zip_stat.date_time + (0, 0, -1) # ymdhms+wday, yday, dst - #1980 offset already done + # ymdhms+wday, yday, dst + date_time = zip_stat.date_time + (0, 0, -1) + # 1980 offset already done timestamp = time.mktime(date_time) return timestamp, size @@ -1542,7 +1839,8 @@ class ZipProvider(EggProvider): last = self._extract_resource( manager, os.path.join(zip_path, name) ) - return os.path.dirname(last) # return the extracted directory name + # return the extracted directory name + return os.path.dirname(last) timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) @@ -1561,7 +1859,7 @@ class ZipProvider(EggProvider): outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path)) os.write(outf, self.loader.get_data(zip_path)) os.close(outf) - utime(tmpnam, (timestamp,timestamp)) + utime(tmpnam, (timestamp, timestamp)) manager.postprocess(tmpnam, real_path) try: @@ -1573,14 +1871,16 @@ class ZipProvider(EggProvider): # the file became current since it was checked above, # so proceed. return real_path - elif os.name=='nt': # Windows, del old file and retry + # Windows, del old file and retry + elif os.name=='nt': unlink(real_path) rename(tmpnam, real_path) return real_path raise except os.error: - manager.extraction_error() # report a user-friendly error + # report a user-friendly error + manager.extraction_error() return real_path @@ -1596,9 +1896,8 @@ class ZipProvider(EggProvider): return False # check that the contents match zip_contents = self.loader.get_data(zip_path) - f = open(file_path, 'rb') - file_contents = f.read() - f.close() + with open(file_path, 'rb') as f: + file_contents = f.read() return zip_contents == file_contents def _get_eager_resources(self): @@ -1631,17 +1930,17 @@ class ZipProvider(EggProvider): zip_path = self._zipinfo_name(fspath) return zip_path in self.zipinfo or zip_path in self._index() - def _isdir(self,fspath): + def _isdir(self, fspath): return self._zipinfo_name(fspath) in self._index() - def _listdir(self,fspath): + def _listdir(self, fspath): return list(self._index().get(self._zipinfo_name(fspath), ())) - def _eager_to_zip(self,resource_name): - return self._zipinfo_name(self._fn(self.egg_root,resource_name)) + def _eager_to_zip(self, resource_name): + return self._zipinfo_name(self._fn(self.egg_root, resource_name)) - def _resource_to_zip(self,resource_name): - return self._zipinfo_name(self._fn(self.module_path,resource_name)) + def _resource_to_zip(self, resource_name): + return self._zipinfo_name(self._fn(self.module_path, resource_name)) register_loader_type(zipimport.zipimporter, ZipProvider) @@ -1658,21 +1957,20 @@ class FileMetadata(EmptyProvider): the provided location. 
""" - def __init__(self,path): + def __init__(self, path): self.path = path - def has_metadata(self,name): + def has_metadata(self, name): return name=='PKG-INFO' - def get_metadata(self,name): + def get_metadata(self, name): if name=='PKG-INFO': - f = open(self.path,'rU') - metadata = f.read() - f.close() + with open(self.path,'rU') as f: + metadata = f.read() return metadata raise KeyError("No metadata except PKG-INFO is available") - def get_metadata_lines(self,name): + def get_metadata_lines(self, name): return yield_lines(self.get_metadata(name)) @@ -1687,7 +1985,7 @@ class PathMetadata(DefaultProvider): base_dir = os.path.dirname(egg_info) metadata = PathMetadata(base_dir, egg_info) dist_name = os.path.splitext(os.path.basename(egg_info))[0] - dist = Distribution(basedir,project_name=dist_name,metadata=metadata) + dist = Distribution(basedir, project_name=dist_name, metadata=metadata) # Unpacked egg directories: @@ -1707,7 +2005,6 @@ class EggMetadata(ZipProvider): def __init__(self, importer): """Create a metadata provider from a zipimporter""" - self.zipinfo = build_zipmanifest(importer.archive) self.zip_pre = importer.archive+os.sep self.loader = importer if importer.prefix: @@ -1746,7 +2043,8 @@ def find_eggs_in_zip(importer, path_item, only=False): if metadata.has_metadata('PKG-INFO'): yield Distribution.from_filename(path_item, metadata=metadata) if only: - return # don't yield nested distros + # don't yield nested distros + return for subitem in metadata.resource_listdir('/'): if subitem.endswith('.egg'): subpath = os.path.join(path_item, subitem) @@ -1757,7 +2055,7 @@ register_finder(zipimport.zipimporter, find_eggs_in_zip) def find_nothing(importer, path_item, only=False): return () -register_finder(object,find_nothing) +register_finder(object, find_nothing) def find_on_path(importer, path_item, only=False): """Yield distributions accessible on a sys.path directory""" @@ -1783,23 +2081,24 @@ def find_on_path(importer, path_item, only=False): else: metadata = FileMetadata(fullpath) yield Distribution.from_location( - path_item,entry,metadata,precedence=DEVELOP_DIST + path_item, entry, metadata, precedence=DEVELOP_DIST ) elif not only and lower.endswith('.egg'): - for dist in find_distributions(os.path.join(path_item, entry)): + dists = find_distributions(os.path.join(path_item, entry)) + for dist in dists: yield dist elif not only and lower.endswith('.egg-link'): - entry_file = open(os.path.join(path_item, entry)) - try: + with open(os.path.join(path_item, entry)) as entry_file: entry_lines = entry_file.readlines() - finally: - entry_file.close() for line in entry_lines: - if not line.strip(): continue - for item in find_distributions(os.path.join(path_item,line.rstrip())): + if not line.strip(): + continue + path = os.path.join(path_item, line.rstrip()) + dists = find_distributions(path) + for item in dists: yield item break -register_finder(pkgutil.ImpImporter,find_on_path) +register_finder(pkgutil.ImpImporter, find_on_path) if importlib_bootstrap is not None: register_finder(importlib_bootstrap.FileFinder, find_on_path) @@ -1814,7 +2113,7 @@ def register_namespace_handler(importer_type, namespace_handler): `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item handler), and `namespace_handler` is a callable like this:: - def namespace_handler(importer,path_entry,moduleName,module): + def namespace_handler(importer, path_entry, moduleName, module): # return a path_entry to use for child packages Namespace handlers are only called if the importer object 
has already @@ -1890,7 +2189,8 @@ def fixup_namespace_packages(path_item, parent=None): try: for package in _namespace_packages.get(parent,()): subpath = _handle_ns(package, path_item) - if subpath: fixup_namespace_packages(subpath,package) + if subpath: + fixup_namespace_packages(subpath, package) finally: imp.release_lock() @@ -1906,8 +2206,8 @@ def file_ns_handler(importer, path_item, packageName, module): # Only return the path if it's not already there return subpath -register_namespace_handler(pkgutil.ImpImporter,file_ns_handler) -register_namespace_handler(zipimport.zipimporter,file_ns_handler) +register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) +register_namespace_handler(zipimport.zipimporter, file_ns_handler) if importlib_bootstrap is not None: register_namespace_handler(importlib_bootstrap.FileFinder, file_ns_handler) @@ -1916,14 +2216,14 @@ if importlib_bootstrap is not None: def null_ns_handler(importer, path_item, packageName, module): return None -register_namespace_handler(object,null_ns_handler) +register_namespace_handler(object, null_ns_handler) def normalize_path(filename): """Normalize a file/dir name for comparison purposes""" return os.path.normcase(os.path.realpath(filename)) -def _normalize_cached(filename,_cache={}): +def _normalize_cached(filename, _cache={}): try: return _cache[filename] except KeyError: @@ -1939,22 +2239,28 @@ def _set_parent_ns(packageName): def yield_lines(strs): - """Yield non-empty/non-comment lines of a ``basestring`` or sequence""" - if isinstance(strs,basestring): + """Yield non-empty/non-comment lines of a string or sequence""" + if isinstance(strs, string_types): for s in strs.splitlines(): s = s.strip() - if s and not s.startswith('#'): # skip blank lines/comments + # skip blank lines/comments + if s and not s.startswith('#'): yield s else: for ss in strs: for s in yield_lines(ss): yield s -LINE_END = re.compile(r"\s*(#.*)?$").match # whitespace and comment -CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match # line continuation -DISTRO = re.compile(r"\s*((\w|[-.])+)").match # Distribution or extra -VERSION = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match # ver. info -COMMA = re.compile(r"\s*,").match # comma between items +# whitespace and comment +LINE_END = re.compile(r"\s*(#.*)?$").match +# line continuation +CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match +# Distribution or extra +DISTRO = re.compile(r"\s*((\w|[-.])+)").match +# ver. 
info +VERSION = re.compile(r"\s*(<=?|>=?|===?|!=|~=)\s*((\w|[-.*_!+])+)").match +# comma between items +COMMA = re.compile(r"\s*,").match OBRACKET = re.compile(r"\s*\[").match CBRACKET = re.compile(r"\s*\]").match MODULE = re.compile(r"\w+(\.\w+)*$").match @@ -1964,62 +2270,7 @@ EGG_NAME = re.compile( re.VERBOSE | re.IGNORECASE ).match -component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE) -replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get -def _parse_version_parts(s): - for part in component_re.split(s): - part = replace(part,part) - if not part or part=='.': - continue - if part[:1] in '0123456789': - yield part.zfill(8) # pad for numeric comparison - else: - yield '*'+part - - yield '*final' # ensure that alpha/beta/candidate are before final - -def parse_version(s): - """Convert a version string to a chronologically-sortable key - - This is a rough cross between distutils' StrictVersion and LooseVersion; - if you give it versions that would work with StrictVersion, then it behaves - the same; otherwise it acts like a slightly-smarter LooseVersion. It is - *possible* to create pathological version coding schemes that will fool - this parser, but they should be very rare in practice. - - The returned value will be a tuple of strings. Numeric portions of the - version are padded to 8 digits so they will compare numerically, but - without relying on how numbers compare relative to strings. Dots are - dropped, but dashes are retained. Trailing zeros between alpha segments - or dashes are suppressed, so that e.g. "2.4.0" is considered the same as - "2.4". Alphanumeric parts are lower-cased. - - The algorithm assumes that strings like "-" and any alpha string that - alphabetically follows "final" represents a "patch level". So, "2.4-1" - is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is - considered newer than "2.4-1", which in turn is newer than "2.4". - - Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that - come before "final" alphabetically) are assumed to be pre-release versions, - so that the version "2.4" is considered newer than "2.4a1". - - Finally, to handle miscellaneous cases, the strings "pre", "preview", and - "rc" are treated as if they were "c", i.e. as though they were release - candidates, and therefore are not as new as a version string that does not - contain them, and "dev" is replaced with an '@' so that it sorts lower than - than any other pre-release tag. - """ - parts = [] - for part in _parse_version_parts(s.lower()): - if part.startswith('*'): - if part<'*final': # remove '-' before a prerelease tag - while parts and parts[-1]=='*final-': parts.pop() - # remove trailing zeros from each series of numeric parts - while parts and parts[-1]=='00000000': - parts.pop() - parts.append(part) - return tuple(parts) class EntryPoint(object): """Object representing an advertised importable object""" @@ -2043,21 +2294,46 @@ class EntryPoint(object): def __repr__(self): return "EntryPoint.parse(%r)" % str(self) - def load(self, require=True, env=None, installer=None): - if require: self.require(env, installer) - entry = __import__(self.module_name, globals(),globals(), ['__name__']) - for attr in self.attrs: - try: - entry = getattr(entry,attr) - except AttributeError: - raise ImportError("%r has no %r attribute" % (entry,attr)) - return entry + def load(self, require=True, *args, **kwargs): + """ + Require packages for this EntryPoint, then resolve it. 
+ """ + if not require or args or kwargs: + warnings.warn( + "Parameters to load are deprecated. Call .resolve and " + ".require separately.", + DeprecationWarning, + stacklevel=2, + ) + if require: + self.require(*args, **kwargs) + return self.resolve() + + def resolve(self): + """ + Resolve the entry point from its module and attrs. + """ + module = __import__(self.module_name, fromlist=['__name__'], level=0) + try: + return functools.reduce(getattr, self.attrs, module) + except AttributeError as exc: + raise ImportError(str(exc)) def require(self, env=None, installer=None): if self.extras and not self.dist: raise UnknownExtra("Can't require() without a distribution", self) - list(map(working_set.add, - working_set.resolve(self.dist.requires(self.extras),env,installer))) + reqs = self.dist.requires(self.extras) + items = working_set.resolve(reqs, env, installer) + list(map(working_set.add, items)) + + pattern = re.compile( + r'\s*' + r'(?P<name>.+?)\s*' + r'=\s*' + r'(?P<module>[\w.]+)\s*' + r'(:\s*(?P<attr>[\w.]+))?\s*' + r'(?P<extras>\[.*\])?\s*$' + ) @classmethod def parse(cls, src, dist=None): @@ -2065,31 +2341,28 @@ class EntryPoint(object): Entry point syntax follows the form:: - name = some.module:some.attr [extra1,extra2] + name = some.module:some.attr [extra1, extra2] The entry name and module name are required, but the ``:attrs`` and ``[extras]`` parts are optional """ - try: - attrs = extras = () - name,value = src.split('=',1) - if '[' in value: - value,extras = value.split('[',1) - req = Requirement.parse("x["+extras) - if req.specs: raise ValueError - extras = req.extras - if ':' in value: - value,attrs = value.split(':',1) - if not MODULE(attrs.rstrip()): - raise ValueError - attrs = attrs.rstrip().split('.') - except ValueError: - raise ValueError( - "EntryPoint must be in 'name=module:attrs [extras]' format", - src - ) - else: - return cls(name.strip(), value.strip(), attrs, extras, dist) + m = cls.pattern.match(src) + if not m: + msg = "EntryPoint must be in 'name=module:attrs [extras]' format" + raise ValueError(msg, src) + res = m.groupdict() + extras = cls._parse_extras(res['extras']) + attrs = res['attr'].split('.') if res['attr'] else () + return cls(res['name'], res['module'], attrs, extras, dist) + + @classmethod + def _parse_extras(cls, extras_spec): + if not extras_spec: + return () + req = Requirement.parse('x' + extras_spec) + if req.specs: + raise ValueError() + return req.extras @classmethod def parse_group(cls, group, lines, dist=None): @@ -2107,7 +2380,7 @@ class EntryPoint(object): @classmethod def parse_map(cls, data, dist=None): """Parse a map of entry point groups""" - if isinstance(data,dict): + if isinstance(data, dict): data = data.items() else: data = split_sections(data) @@ -2150,7 +2423,7 @@ class Distribution(object): self._provider = metadata or empty_provider @classmethod - def from_location(cls,location,basename,metadata=None,**kw): + def from_location(cls, location, basename, metadata=None,**kw): project_name, version, py_version, platform = [None]*4 basename, ext = os.path.splitext(basename) if ext.lower() in _distributionImpl: @@ -2166,30 +2439,38 @@ class Distribution(object): py_version=py_version, platform=platform, **kw ) - hashcmp = property( - lambda self: ( - getattr(self,'parsed_version',()), + @property + def hashcmp(self): + return ( + self.parsed_version, self.precedence, self.key, _remove_md5_fragment(self.location), self.py_version, - self.platform + self.platform, ) - ) - def __hash__(self): return hash(self.hashcmp) + + def 
__hash__(self): + return hash(self.hashcmp) + def __lt__(self, other): return self.hashcmp < other.hashcmp + def __le__(self, other): return self.hashcmp <= other.hashcmp + def __gt__(self, other): return self.hashcmp > other.hashcmp + def __ge__(self, other): return self.hashcmp >= other.hashcmp + def __eq__(self, other): if not isinstance(other, self.__class__): # It's not a Distribution, so they are not equal return False return self.hashcmp == other.hashcmp + def __ne__(self, other): return not self == other @@ -2207,11 +2488,29 @@ class Distribution(object): @property def parsed_version(self): - try: - return self._parsed_version - except AttributeError: - self._parsed_version = pv = parse_version(self.version) - return pv + if not hasattr(self, "_parsed_version"): + self._parsed_version = parse_version(self.version) + if isinstance( + self._parsed_version, packaging.version.LegacyVersion): + # While an empty version is techincally a legacy version and + # is not a valid PEP 440 version, it's also unlikely to + # actually come from someone and instead it is more likely that + # it comes from setuptools attempting to parse a filename and + # including it in the list. So for that we'll gate this warning + # on if the version is anything at all or not. + if self.version: + warnings.warn( + "'%s (%s)' is being parsed as a legacy, non PEP 440, " + "version. You may find odd behavior and sort order. " + "In particular it will be sorted as less than 0.0. It " + "is recommend to migrate to PEP 440 compatible " + "versions." % ( + self.project_name, self.version, + ), + PEP440Warning, + ) + + return self._parsed_version @property def version(self): @@ -2223,9 +2522,8 @@ class Distribution(object): self._version = safe_version(line.split(':',1)[1].strip()) return self._version else: - raise ValueError( - "Missing 'Version:' header and/or %s file" % self.PKG_INFO, self - ) + tmpl = "Missing 'Version:' header and/or %s file" + raise ValueError(tmpl % self.PKG_INFO, self) @property def _dep_map(self): @@ -2234,23 +2532,24 @@ class Distribution(object): except AttributeError: dm = self.__dep_map = {None: []} for name in 'requires.txt', 'depends.txt': - for extra,reqs in split_sections(self._get_metadata(name)): + for extra, reqs in split_sections(self._get_metadata(name)): if extra: if ':' in extra: - extra, marker = extra.split(':',1) + extra, marker = extra.split(':', 1) if invalid_marker(marker): - reqs=[] # XXX warn + # XXX warn + reqs=[] elif not evaluate_marker(marker): reqs=[] extra = safe_extra(extra) or None dm.setdefault(extra,[]).extend(parse_requirements(reqs)) return dm - def requires(self,extras=()): + def requires(self, extras=()): """List of Requirements needed for this distro if `extras` are used""" dm = self._dep_map deps = [] - deps.extend(dm.get(None,())) + deps.extend(dm.get(None, ())) for ext in extras: try: deps.extend(dm[safe_extra(ext)]) @@ -2260,18 +2559,21 @@ class Distribution(object): ) return deps - def _get_metadata(self,name): + def _get_metadata(self, name): if self.has_metadata(name): for line in self.get_metadata_lines(name): yield line - def activate(self,path=None): + def activate(self, path=None): """Ensure distribution is importable on `path` (default=sys.path)""" - if path is None: path = sys.path + if path is None: + path = sys.path self.insert_on(path) if path is sys.path: fixup_namespace_packages(self.location) - list(map(declare_namespace, self._get_metadata('namespace_packages.txt'))) + for pkg in self._get_metadata('namespace_packages.txt'): + if pkg in 
sys.modules: + declare_namespace(pkg) def egg_name(self): """Return what this distribution's standard .egg filename should be""" @@ -2281,29 +2583,31 @@ class Distribution(object): ) if self.platform: - filename += '-'+self.platform + filename += '-' + self.platform return filename def __repr__(self): if self.location: - return "%s (%s)" % (self,self.location) + return "%s (%s)" % (self, self.location) else: return str(self) def __str__(self): - try: version = getattr(self,'version',None) - except ValueError: version = None + try: + version = getattr(self, 'version', None) + except ValueError: + version = None version = version or "[unknown version]" - return "%s %s" % (self.project_name,version) + return "%s %s" % (self.project_name, version) - def __getattr__(self,attr): + def __getattr__(self, attr): """Delegate all unrecognized public attributes to .metadata provider""" if attr.startswith('_'): raise AttributeError(attr) return getattr(self._provider, attr) @classmethod - def from_filename(cls,filename,metadata=None, **kw): + def from_filename(cls, filename, metadata=None, **kw): return cls.from_location( _normalize_cached(filename), os.path.basename(filename), metadata, **kw @@ -2311,13 +2615,18 @@ class Distribution(object): def as_requirement(self): """Return a ``Requirement`` that matches this distribution exactly""" - return Requirement.parse('%s==%s' % (self.project_name, self.version)) + if isinstance(self.parsed_version, packaging.version.Version): + spec = "%s==%s" % (self.project_name, self.parsed_version) + else: + spec = "%s===%s" % (self.project_name, self.parsed_version) + + return Requirement.parse(spec) def load_entry_point(self, group, name): """Return the `name` entry point of `group` or raise ImportError""" - ep = self.get_entry_info(group,name) + ep = self.get_entry_info(group, name) if ep is None: - raise ImportError("Entry point %r not found" % ((group,name),)) + raise ImportError("Entry point %r not found" % ((group, name),)) return ep.load() def get_entry_map(self, group=None): @@ -2348,9 +2657,9 @@ class Distribution(object): npath= [(p and _normalize_cached(p) or p) for p in path] for p, item in enumerate(npath): - if item==nloc: + if item == nloc: break - elif item==bdir and self.precedence==EGG_DIST: + elif item == bdir and self.precedence == EGG_DIST: # if it's an .egg, give it precedence over its directory if path is sys.path: self.check_version_conflict() @@ -2364,20 +2673,22 @@ class Distribution(object): return # p is the spot where we found or inserted loc; now remove duplicates - while 1: + while True: try: np = npath.index(nloc, p+1) except ValueError: break else: del npath[np], path[np] - p = np # ha! + # ha! 
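# The hashcmp tuple defined earlier in this patch leads with
# parsed_version, so Distribution ordering now follows PEP 440. A quick
# illustration of that sort order, assuming the new packaging-backed
# parse_version:
from pkg_resources import parse_version

releases = ['1.0', '1.0.dev1', '1.0.post1', '2.0a1', '2.0']
newest_first = sorted(releases, key=parse_version, reverse=True)
# -> ['2.0', '2.0a1', '1.0.post1', '1.0', '1.0.dev1']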
+ p = np return def check_version_conflict(self): - if self.key=='setuptools': - return # ignore the inevitable setuptools self-conflicts :( + if self.key == 'setuptools': + # ignore the inevitable setuptools self-conflicts :( + return nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt')) loc = normalize_path(self.location) @@ -2400,17 +2711,15 @@ class Distribution(object): try: self.version except ValueError: - issue_warning("Unbuilt egg for "+repr(self)) + issue_warning("Unbuilt egg for " + repr(self)) return False return True def clone(self,**kw): """Copy this distribution, substituting in any changed keyword args""" - for attr in ( - 'project_name', 'version', 'py_version', 'platform', 'location', - 'precedence' - ): - kw.setdefault(attr, getattr(self,attr,None)) + names = 'project_name version py_version platform location precedence' + for attr in names.split(): + kw.setdefault(attr, getattr(self, attr, None)) kw.setdefault('metadata', self._provider) return self.__class__(**kw) @@ -2430,8 +2739,8 @@ class DistInfoDistribution(Distribution): try: return self._pkg_info except AttributeError: - from email.parser import Parser - self._pkg_info = Parser().parsestr(self.get_metadata(self.PKG_INFO)) + metadata = self.get_metadata(self.PKG_INFO) + self._pkg_info = email.parser.Parser().parsestr(metadata) return self._pkg_info @property @@ -2499,25 +2808,23 @@ def issue_warning(*args,**kw): level += 1 except ValueError: pass - from warnings import warn - warn(stacklevel = level+1, *args, **kw) + warnings.warn(stacklevel=level + 1, *args, **kw) def parse_requirements(strs): """Yield ``Requirement`` objects for each specification in `strs` - `strs` must be an instance of ``basestring``, or a (possibly-nested) - iterable thereof. + `strs` must be a string, or a (possibly-nested) iterable thereof. 
""" # create a steppable iterator, so we can handle \-continuations lines = iter(yield_lines(strs)) - def scan_list(ITEM,TERMINATOR,line,p,groups,item_name): + def scan_list(ITEM, TERMINATOR, line, p, groups, item_name): items = [] - while not TERMINATOR(line,p): - if CONTINUE(line,p): + while not TERMINATOR(line, p): + if CONTINUE(line, p): try: line = next(lines) p = 0 @@ -2526,23 +2833,26 @@ def parse_requirements(strs): "\\ must not appear on the last nonblank line" ) - match = ITEM(line,p) + match = ITEM(line, p) if not match: - raise ValueError("Expected "+item_name+" in",line,"at",line[p:]) + msg = "Expected " + item_name + " in" + raise ValueError(msg, line, "at", line[p:]) items.append(match.group(*groups)) p = match.end() - match = COMMA(line,p) + match = COMMA(line, p) if match: - p = match.end() # skip the comma - elif not TERMINATOR(line,p): - raise ValueError( - "Expected ',' or end-of-list in",line,"at",line[p:] - ) + # skip the comma + p = match.end() + elif not TERMINATOR(line, p): + msg = "Expected ',' or end-of-list in" + raise ValueError(msg, line, "at", line[p:]) - match = TERMINATOR(line,p) - if match: p = match.end() # skip the terminator, if any + match = TERMINATOR(line, p) + # skip the terminator, if any + if match: + p = match.end() return line, p, items for line in lines: @@ -2553,67 +2863,59 @@ def parse_requirements(strs): p = match.end() extras = [] - match = OBRACKET(line,p) + match = OBRACKET(line, p) if match: p = match.end() line, p, extras = scan_list( DISTRO, CBRACKET, line, p, (1,), "'extra' name" ) - line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec") - specs = [(op,safe_version(val)) for op,val in specs] + line, p, specs = scan_list(VERSION, LINE_END, line, p, (1, 2), + "version spec") + specs = [(op, val) for op, val in specs] yield Requirement(project_name, specs, extras) -def _sort_dists(dists): - tmp = [(dist.hashcmp,dist) for dist in dists] - tmp.sort() - dists[::-1] = [d for hc,d in tmp] - - class Requirement: def __init__(self, project_name, specs, extras): """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" self.unsafe_name, project_name = project_name, safe_name(project_name) self.project_name, self.key = project_name, project_name.lower() - index = [(parse_version(v),state_machine[op],op,v) for op,v in specs] - index.sort() - self.specs = [(op,ver) for parsed,trans,op,ver in index] - self.index, self.extras = index, tuple(map(safe_extra,extras)) + self.specifier = packaging.specifiers.SpecifierSet( + ",".join(["".join([x, y]) for x, y in specs]) + ) + self.specs = specs + self.extras = tuple(map(safe_extra, extras)) self.hashCmp = ( - self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]), - frozenset(self.extras) + self.key, + self.specifier, + frozenset(self.extras), ) self.__hash = hash(self.hashCmp) def __str__(self): - specs = ','.join([''.join(s) for s in self.specs]) extras = ','.join(self.extras) - if extras: extras = '[%s]' % extras - return '%s%s%s' % (self.project_name, extras, specs) + if extras: + extras = '[%s]' % extras + return '%s%s%s' % (self.project_name, extras, self.specifier) - def __eq__(self,other): - return isinstance(other,Requirement) and self.hashCmp==other.hashCmp + def __eq__(self, other): + return ( + isinstance(other, Requirement) and + self.hashCmp == other.hashCmp + ) - def __contains__(self,item): - if isinstance(item,Distribution): - if item.key != self.key: return False - if self.index: item = item.parsed_version # only get if we need it - elif 
isinstance(item,basestring): - item = parse_version(item) - last = None - compare = lambda a, b: (a > b) - (a < b) # -1, 0, 1 - for parsed,trans,op,ver in self.index: - action = trans[compare(item,parsed)] # Indexing: 0, 1, -1 - if action=='F': + def __contains__(self, item): + if isinstance(item, Distribution): + if item.key != self.key: return False - elif action=='T': - return True - elif action=='+': - last = True - elif action=='-' or last is None: last = False - if last is None: last = True # no rules encountered - return last + + item = item.version + + # Allow prereleases always in order to match the previous behavior of + # this method. In the future this should be smarter and follow PEP 440 + # more accurately. + return self.specifier.contains(item, prereleases=True) def __hash__(self): return self.__hash @@ -2624,26 +2926,16 @@ class Requirement: def parse(s): reqs = list(parse_requirements(s)) if reqs: - if len(reqs)==1: + if len(reqs) == 1: return reqs[0] raise ValueError("Expected only one requirement", s) raise ValueError("No requirements found", s) -state_machine = { - # =>< - '<': '--T', - '<=': 'T-T', - '>': 'F+F', - '>=': 'T+F', - '==': 'T..', - '!=': 'F++', -} - def _get_mro(cls): """Get an mro for a type or classic class""" - if not isinstance(cls,type): - class cls(cls,object): pass + if not isinstance(cls, type): + class cls(cls, object): pass return cls.__mro__[1:] return cls.__mro__ @@ -2660,8 +2952,19 @@ def ensure_directory(path): if not os.path.isdir(dirname): os.makedirs(dirname) + +def _bypass_ensure_directory(path, mode=0o777): + """Sandbox-bypassing version of ensure_directory()""" + if not WRITE_SUPPORT: + raise IOError('"os.mkdir" not supported on this platform.') + dirname, filename = split(path) + if dirname and filename and not isdir(dirname): + _bypass_ensure_directory(dirname) + mkdir(dirname, mode) + + def split_sections(s): - """Split a string or iterable thereof into (section,content) pairs + """Split a string or iterable thereof into (section, content) pairs Each ``section`` is a stripped version of the section header ("[section]") and each ``content`` is a list of stripped lines excluding blank lines and @@ -2686,13 +2989,21 @@ def split_sections(s): yield section, content def _mkstemp(*args,**kw): - from tempfile import mkstemp old_open = os.open try: - os.open = os_open # temporarily bypass sandboxing - return mkstemp(*args,**kw) + # temporarily bypass sandboxing + os.open = os_open + return tempfile.mkstemp(*args,**kw) finally: - os.open = old_open # and then put it back + # and then put it back + os.open = old_open + + +# Silence the PEP440Warning by default, so that end users don't get hit by it +# randomly just because they use pkg_resources. We want to append the rule +# because we want earlier uses of filterwarnings to take precedence over this +# one. +warnings.filterwarnings("ignore", category=PEP440Warning, append=True) # Set up global resource manager (deliberately not state-saved) @@ -2704,35 +3015,19 @@ def _initialize(g): _initialize(globals()) # Prepare the master working set and make the ``require()`` API available -_declare_state('object', working_set = WorkingSet()) -try: - # Does the main program list any requirements? 
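# The __requires__ bootstrap deleted below is folded into
# WorkingSet._build_master() (used further down as
# `working_set = WorkingSet._build_master()`). A condensed, hypothetical
# sketch of that builder's logic; the _build_from_requirements fallback
# is assumed and not shown in this hunk:
def build_master(working_set_cls, version_conflict_exc):
    ws = working_set_cls()  # entries seeded from sys.path by default
    try:
        from __main__ import __requires__
    except ImportError:
        # the main program declared no pins: plain sys.path working set
        return ws
    try:
        ws.require(__requires__)
        return ws
    except version_conflict_exc:
        # retry from an empty path so the declared pins take precedence
        return working_set_cls._build_from_requirements(__requires__)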
- from __main__ import __requires__ -except ImportError: - pass # No: just use the default working set based on sys.path -else: - # Yes: ensure the requirements are met, by prefixing sys.path if necessary - try: - working_set.require(__requires__) - except VersionConflict: # try it without defaults already on sys.path - working_set = WorkingSet([]) # by starting with an empty path - for dist in working_set.resolve( - parse_requirements(__requires__), Environment() - ): - working_set.add(dist) - for entry in sys.path: # add any missing entries from sys.path - if entry not in working_set.entries: - working_set.add_entry(entry) - sys.path[:] = working_set.entries # then copy back to sys.path +working_set = WorkingSet._build_master() +_declare_state('object', working_set=working_set) require = working_set.require iter_entry_points = working_set.iter_entry_points add_activation_listener = working_set.subscribe run_script = working_set.run_script -run_main = run_script # backward compatibility +# backward compatibility +run_main = run_script # Activate all distributions already on sys.path, and ensure that # all distributions added to the working set in the future (e.g. by # calling ``require()``) will get activated as well. add_activation_listener(lambda dist: dist.activate()) working_set.entries=[] -list(map(working_set.add_entry,sys.path)) # match order +# match order +list(map(working_set.add_entry, sys.path)) diff --git a/awx/lib/site-packages/pkg_resources/_vendor/__init__.py b/awx/lib/site-packages/pkg_resources/_vendor/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/pkg_resources/_vendor/packaging/__about__.py b/awx/lib/site-packages/pkg_resources/_vendor/packaging/__about__.py new file mode 100644 index 0000000000..36f1a35c85 --- /dev/null +++ b/awx/lib/site-packages/pkg_resources/_vendor/packaging/__about__.py @@ -0,0 +1,31 @@ +# Copyright 2014 Donald Stufft +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import absolute_import, division, print_function + +__all__ = [ + "__title__", "__summary__", "__uri__", "__version__", "__author__", + "__email__", "__license__", "__copyright__", +] + +__title__ = "packaging" +__summary__ = "Core utilities for Python packages" +__uri__ = "https://github.com/pypa/packaging" + +__version__ = "15.0" + +__author__ = "Donald Stufft" +__email__ = "donald@stufft.io" + +__license__ = "Apache License, Version 2.0" +__copyright__ = "Copyright 2014 %s" % __author__ diff --git a/awx/lib/site-packages/pkg_resources/_vendor/packaging/__init__.py b/awx/lib/site-packages/pkg_resources/_vendor/packaging/__init__.py new file mode 100644 index 0000000000..c39a8eab8e --- /dev/null +++ b/awx/lib/site-packages/pkg_resources/_vendor/packaging/__init__.py @@ -0,0 +1,24 @@ +# Copyright 2014 Donald Stufft +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import absolute_import, division, print_function + +from .__about__ import ( + __author__, __copyright__, __email__, __license__, __summary__, __title__, + __uri__, __version__ +) + +__all__ = [ + "__title__", "__summary__", "__uri__", "__version__", "__author__", + "__email__", "__license__", "__copyright__", +] diff --git a/awx/lib/site-packages/pkg_resources/_vendor/packaging/_compat.py b/awx/lib/site-packages/pkg_resources/_vendor/packaging/_compat.py new file mode 100644 index 0000000000..5c396ceac6 --- /dev/null +++ b/awx/lib/site-packages/pkg_resources/_vendor/packaging/_compat.py @@ -0,0 +1,40 @@ +# Copyright 2014 Donald Stufft +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import absolute_import, division, print_function + +import sys + + +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 + +# flake8: noqa + +if PY3: + string_types = str, +else: + string_types = basestring, + + +def with_metaclass(meta, *bases): + """ + Create a base class with a metaclass. + """ + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(meta): + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) diff --git a/awx/lib/site-packages/pkg_resources/_vendor/packaging/_structures.py b/awx/lib/site-packages/pkg_resources/_vendor/packaging/_structures.py new file mode 100644 index 0000000000..0ae9bb52a2 --- /dev/null +++ b/awx/lib/site-packages/pkg_resources/_vendor/packaging/_structures.py @@ -0,0 +1,78 @@ +# Copyright 2014 Donald Stufft +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
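# The Infinity/NegativeInfinity sentinels defined below are extreme sort
# keys: packaging's version comparison (in version.py, not shown in this
# excerpt) pads absent segments with them so that, for example, a release
# with no pre-release tag sorts after the same release with one. A toy
# illustration of the trick:
class _Max(object):
    def __lt__(self, other):
        return False

    def __gt__(self, other):
        return True

_MAX = _Max()
pairs = [((1, 0), _MAX), ((1, 0), ('a', 1))]
pairs.sort()
# -> the tagged (pre-release-like) entry sorts first, _MAX sorts last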
+from __future__ import absolute_import, division, print_function + + +class Infinity(object): + + def __repr__(self): + return "Infinity" + + def __hash__(self): + return hash(repr(self)) + + def __lt__(self, other): + return False + + def __le__(self, other): + return False + + def __eq__(self, other): + return isinstance(other, self.__class__) + + def __ne__(self, other): + return not isinstance(other, self.__class__) + + def __gt__(self, other): + return True + + def __ge__(self, other): + return True + + def __neg__(self): + return NegativeInfinity + +Infinity = Infinity() + + +class NegativeInfinity(object): + + def __repr__(self): + return "-Infinity" + + def __hash__(self): + return hash(repr(self)) + + def __lt__(self, other): + return True + + def __le__(self, other): + return True + + def __eq__(self, other): + return isinstance(other, self.__class__) + + def __ne__(self, other): + return not isinstance(other, self.__class__) + + def __gt__(self, other): + return False + + def __ge__(self, other): + return False + + def __neg__(self): + return Infinity + +NegativeInfinity = NegativeInfinity() diff --git a/awx/lib/site-packages/pkg_resources/_vendor/packaging/specifiers.py b/awx/lib/site-packages/pkg_resources/_vendor/packaging/specifiers.py new file mode 100644 index 0000000000..9ad0a635ed --- /dev/null +++ b/awx/lib/site-packages/pkg_resources/_vendor/packaging/specifiers.py @@ -0,0 +1,772 @@ +# Copyright 2014 Donald Stufft +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import absolute_import, division, print_function + +import abc +import functools +import itertools +import re + +from ._compat import string_types, with_metaclass +from .version import Version, LegacyVersion, parse + + +class InvalidSpecifier(ValueError): + """ + An invalid specifier was found, users should refer to PEP 440. + """ + + +class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): + + @abc.abstractmethod + def __str__(self): + """ + Returns the str representation of this Specifier like object. This + should be representative of the Specifier itself. + """ + + @abc.abstractmethod + def __hash__(self): + """ + Returns a hash value for this Specifier like object. + """ + + @abc.abstractmethod + def __eq__(self, other): + """ + Returns a boolean representing whether or not the two Specifier like + objects are equal. + """ + + @abc.abstractmethod + def __ne__(self, other): + """ + Returns a boolean representing whether or not the two Specifier like + objects are not equal. + """ + + @abc.abstractproperty + def prereleases(self): + """ + Returns whether or not pre-releases as a whole are allowed by this + specifier. + """ + + @prereleases.setter + def prereleases(self, value): + """ + Sets whether or not pre-releases as a whole are allowed by this + specifier. + """ + + @abc.abstractmethod + def contains(self, item, prereleases=None): + """ + Determines if the given item is contained within this specifier. 
+ """ + + @abc.abstractmethod + def filter(self, iterable, prereleases=None): + """ + Takes an iterable of items and filters them so that only items which + are contained within this specifier are allowed in it. + """ + + +class _IndividualSpecifier(BaseSpecifier): + + _operators = {} + + def __init__(self, spec="", prereleases=None): + match = self._regex.search(spec) + if not match: + raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) + + self._spec = ( + match.group("operator").strip(), + match.group("version").strip(), + ) + + # Store whether or not this Specifier should accept prereleases + self._prereleases = prereleases + + def __repr__(self): + pre = ( + ", prereleases={0!r}".format(self.prereleases) + if self._prereleases is not None + else "" + ) + + return "<{0}({1!r}{2})>".format( + self.__class__.__name__, + str(self), + pre, + ) + + def __str__(self): + return "{0}{1}".format(*self._spec) + + def __hash__(self): + return hash(self._spec) + + def __eq__(self, other): + if isinstance(other, string_types): + try: + other = self.__class__(other) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._spec == other._spec + + def __ne__(self, other): + if isinstance(other, string_types): + try: + other = self.__class__(other) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._spec != other._spec + + def _get_operator(self, op): + return getattr(self, "_compare_{0}".format(self._operators[op])) + + def _coerce_version(self, version): + if not isinstance(version, (LegacyVersion, Version)): + version = parse(version) + return version + + @property + def prereleases(self): + return self._prereleases + + @prereleases.setter + def prereleases(self, value): + self._prereleases = value + + def contains(self, item, prereleases=None): + # Determine if prereleases are to be allowed or not. + if prereleases is None: + prereleases = self.prereleases + + # Normalize item to a Version or LegacyVersion, this allows us to have + # a shortcut for ``"2.0" in Specifier(">=2") + item = self._coerce_version(item) + + # Determine if we should be supporting prereleases in this specifier + # or not, if we do not support prereleases than we can short circuit + # logic if this version is a prereleases. + if item.is_prerelease and not prereleases: + return False + + # Actually do the comparison to determine if this item is contained + # within this Specifier or not. + return self._get_operator(self._spec[0])(item, self._spec[1]) + + def filter(self, iterable, prereleases=None): + yielded = False + found_prereleases = [] + + kw = {"prereleases": prereleases if prereleases is not None else True} + + # Attempt to iterate over all the values in the iterable and if any of + # them match, yield them. + for version in iterable: + parsed_version = self._coerce_version(version) + + if self.contains(parsed_version, **kw): + # If our version is a prerelease, and we were not set to allow + # prereleases, then we'll store it for later incase nothing + # else matches this specifier. + if (parsed_version.is_prerelease + and not (prereleases or self.prereleases)): + found_prereleases.append(version) + # Either this is not a prerelease, or we should have been + # accepting prereleases from the begining. 
+ else: + yielded = True + yield version + + # Now that we've iterated over everything, determine if we've yielded + # any values, and if we have not and we have any prereleases stored up + # then we will go ahead and yield the prereleases. + if not yielded and found_prereleases: + for version in found_prereleases: + yield version + + +class LegacySpecifier(_IndividualSpecifier): + + _regex = re.compile( + r""" + ^ + \s* + (?P<operator>(==|!=|<=|>=|<|>)) + \s* + (?P<version> + [^\s]* # We just match everything, except for whitespace since this + # is a "legacy" specifier and the version string can be just + # about anything. + ) + \s* + $ + """, + re.VERBOSE | re.IGNORECASE, + ) + + _operators = { + "==": "equal", + "!=": "not_equal", + "<=": "less_than_equal", + ">=": "greater_than_equal", + "<": "less_than", + ">": "greater_than", + } + + def _coerce_version(self, version): + if not isinstance(version, LegacyVersion): + version = LegacyVersion(str(version)) + return version + + def _compare_equal(self, prospective, spec): + return prospective == self._coerce_version(spec) + + def _compare_not_equal(self, prospective, spec): + return prospective != self._coerce_version(spec) + + def _compare_less_than_equal(self, prospective, spec): + return prospective <= self._coerce_version(spec) + + def _compare_greater_than_equal(self, prospective, spec): + return prospective >= self._coerce_version(spec) + + def _compare_less_than(self, prospective, spec): + return prospective < self._coerce_version(spec) + + def _compare_greater_than(self, prospective, spec): + return prospective > self._coerce_version(spec) + + +def _require_version_compare(fn): + @functools.wraps(fn) + def wrapped(self, prospective, spec): + if not isinstance(prospective, Version): + return False + return fn(self, prospective, spec) + return wrapped + + +class Specifier(_IndividualSpecifier): + + _regex = re.compile( + r""" + ^ + \s* + (?P<operator>(~=|==|!=|<=|>=|<|>|===)) + (?P<version> + (?: + # The identity operators allow for an escape hatch that will + # do an exact string match of the version you wish to install. + # This will not be parsed by PEP 440 and we cannot determine + # any semantic meaning from it. This operator is discouraged + # but included entirely as an escape hatch. + (?<====) # Only match for the identity operator + \s* + [^\s]* # We just match everything, except for whitespace + # since we are only testing for strict identity. + ) + | + (?: + # The (non)equality operators allow for wild card and local + # versions to be specified so we have to define these two + # operators separately to enable that. + (?<===|!=) # Only match for equals and not equals + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)* # release + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + + # You cannot use a wild card and a dev or local version + # together so group them with a | and make them optional. + (?: + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local + | + \.\* # Wild card syntax of .* + )? + ) + | + (?: + # The compatible operator requires at least two digits in the + # release segment. + (?<=~=) # Only match for the compatible operator + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? 
+                (?:                                   # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
+            )
+            |
+            (?:
+                # All other operators only allow a subset of what the
+                # (non)equality operators do. Specifically they do not allow
+                # local versions to be specified nor do they allow the prefix
+                # matching wild cards.
+                (?<!==|!=|~=)         # We have special cases for these
+                                      # operators so we want to make sure they
+                                      # don't match here.
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)*   # release
+                (?:                   # pre release
+                    [-_\.]?
+                    (a|b|c|rc|alpha|beta|pre|preview)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                                   # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
+            )
+        )
+        \s*
+        $
+        """,
+        re.VERBOSE | re.IGNORECASE,
+    )
+
+    _operators = {
+        "~=": "compatible",
+        "==": "equal",
+        "!=": "not_equal",
+        "<=": "less_than_equal",
+        ">=": "greater_than_equal",
+        "<": "less_than",
+        ">": "greater_than",
+        "===": "arbitrary",
+    }
+
+    @_require_version_compare
+    def _compare_compatible(self, prospective, spec):
+        # Compatible releases have an equivalent combination of >= and ==.
+        # That is, ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
+        # implement this in terms of the other specifiers instead of
+        # implementing it ourselves. The only thing we need to do is construct
+        # the other specifiers.
+
+        # We want everything but the last item in the version, but we want to
+        # ignore post and dev releases and we want to treat the pre-release as
+        # its own separate segment.
+        prefix = ".".join(
+            list(
+                itertools.takewhile(
+                    lambda x: (not x.startswith("post")
+                               and not x.startswith("dev")),
+                    _version_split(spec),
+                )
+            )[:-1]
+        )
+
+        # Add the prefix notation to the end of our string
+        prefix += ".*"
+
+        return (self._get_operator(">=")(prospective, spec)
+                and self._get_operator("==")(prospective, prefix))
+
+    @_require_version_compare
+    def _compare_equal(self, prospective, spec):
+        # We need special logic to handle prefix matching
+        if spec.endswith(".*"):
+            # Split the spec out by dots, and pretend that there is an implicit
+            # dot in between a release segment and a pre-release segment.
+            spec = _version_split(spec[:-2])  # Remove the trailing .*
+
+            # Split the prospective version out by dots, and pretend that there
+            # is an implicit dot in between a release segment and a pre-release
+            # segment.
+            prospective = _version_split(str(prospective))
+
+            # Shorten the prospective version to be the same length as the spec
+            # so that we can determine if the specifier is a prefix of the
+            # prospective version or not.
+            prospective = prospective[:len(spec)]
+
+            # Pad out our two sides with zeros so that they both equal the same
+            # length.
+            spec, prospective = _pad_version(spec, prospective)
+        else:
+            # Convert our spec string into a Version
+            spec = Version(spec)
+
+            # If the specifier does not have a local segment, then we want to
+            # act as if the prospective version also does not have a local
+            # segment.
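+            # An illustrative example (assumed, given the Version semantics
+            # above, not taken from upstream): Specifier("==1.0") contains
+            # "1.0+anything" because the prospective's local segment is
+            # ignored here, while Specifier("==1.0+abc") does not contain
+            # "1.0".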
+            if not spec.local:
+                prospective = Version(prospective.public)
+
+        return prospective == spec
+
+    @_require_version_compare
+    def _compare_not_equal(self, prospective, spec):
+        return not self._compare_equal(prospective, spec)
+
+    @_require_version_compare
+    def _compare_less_than_equal(self, prospective, spec):
+        return prospective <= Version(spec)
+
+    @_require_version_compare
+    def _compare_greater_than_equal(self, prospective, spec):
+        return prospective >= Version(spec)
+
+    @_require_version_compare
+    def _compare_less_than(self, prospective, spec):
+        # Convert our spec to a Version instance, since we'll want to work with
+        # it as a version.
+        spec = Version(spec)
+
+        # Check to see if the prospective version is less than the spec
+        # version. If it's not, we can short-circuit and just return False now
+        # instead of doing extra unneeded work.
+        if not prospective < spec:
+            return False
+
+        # This special case is here so that, unless the specifier itself
+        # includes a pre-release version, we do not accept pre-release
+        # versions for the version mentioned in the specifier (e.g. <3.1
+        # should not match 3.1.dev0, but should match 3.0.dev0).
+        if not spec.is_prerelease and prospective.is_prerelease:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # If we've gotten to here, it means that the prospective version is
+        # both less than the spec version *and* it's not a pre-release of the
+        # same version in the spec.
+        return True
+
+    @_require_version_compare
+    def _compare_greater_than(self, prospective, spec):
+        # Convert our spec to a Version instance, since we'll want to work with
+        # it as a version.
+        spec = Version(spec)
+
+        # Check to see if the prospective version is greater than the spec
+        # version. If it's not, we can short-circuit and just return False now
+        # instead of doing extra unneeded work.
+        if not prospective > spec:
+            return False
+
+        # This special case is here so that, unless the specifier itself
+        # includes a post-release version, we do not accept post-release
+        # versions for the version mentioned in the specifier (e.g. >3.1
+        # should not match 3.0.post0, but should match 3.2.post0).
+        if not spec.is_postrelease and prospective.is_postrelease:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # Ensure that we do not allow a local version of the version mentioned
+        # in the specifier, which is technically greater than it, to match.
+        if prospective.local is not None:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # If we've gotten to here, it means that the prospective version is
+        # both greater than the spec version *and* it's not a post-release of
+        # the same version in the spec.
+        return True
+
+    def _compare_arbitrary(self, prospective, spec):
+        return str(prospective).lower() == str(spec).lower()
+
+    @property
+    def prereleases(self):
+        # If there is an explicit prereleases set for this, then we'll just
+        # blindly use that.
+        if self._prereleases is not None:
+            return self._prereleases
+
+        # Look at our specifier and determine if it is an inclusive
+        # operator, and, if it is, whether it includes an explicit
+        # prerelease.
+        operator, version = self._spec
+        if operator in ["==", ">=", "<=", "~="]:
+            # The == specifier can include a trailing .*; if it does, we
+            # want to remove it before parsing.
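+            # For example (illustrative): Specifier(">=1.0a1").prereleases
+            # is True because the specifier itself names a pre-release,
+            # while Specifier(">=1.0").prereleases is False.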
+ if operator == "==" and version.endswith(".*"): + version = version[:-2] + + # Parse the version, and if it is a pre-release than this + # specifier allows pre-releases. + if parse(version).is_prerelease: + return True + + return False + + @prereleases.setter + def prereleases(self, value): + self._prereleases = value + + +_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") + + +def _version_split(version): + result = [] + for item in version.split("."): + match = _prefix_regex.search(item) + if match: + result.extend(match.groups()) + else: + result.append(item) + return result + + +def _pad_version(left, right): + left_split, right_split = [], [] + + # Get the release segment of our versions + left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) + right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) + + # Get the rest of our versions + left_split.append(left[len(left_split):]) + right_split.append(left[len(right_split):]) + + # Insert our padding + left_split.insert( + 1, + ["0"] * max(0, len(right_split[0]) - len(left_split[0])), + ) + right_split.insert( + 1, + ["0"] * max(0, len(left_split[0]) - len(right_split[0])), + ) + + return ( + list(itertools.chain(*left_split)), + list(itertools.chain(*right_split)), + ) + + +class SpecifierSet(BaseSpecifier): + + def __init__(self, specifiers="", prereleases=None): + # Split on , to break each indidivual specifier into it's own item, and + # strip each item to remove leading/trailing whitespace. + specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] + + # Parsed each individual specifier, attempting first to make it a + # Specifier and falling back to a LegacySpecifier. + parsed = set() + for specifier in specifiers: + try: + parsed.add(Specifier(specifier)) + except InvalidSpecifier: + parsed.add(LegacySpecifier(specifier)) + + # Turn our parsed specifiers into a frozen set and save them for later. + self._specs = frozenset(parsed) + + # Store our prereleases value so we can use it later to determine if + # we accept prereleases or not. + self._prereleases = prereleases + + def __repr__(self): + pre = ( + ", prereleases={0!r}".format(self.prereleases) + if self._prereleases is not None + else "" + ) + + return "<SpecifierSet({0!r}{1})>".format(str(self), pre) + + def __str__(self): + return ",".join(sorted(str(s) for s in self._specs)) + + def __hash__(self): + return hash(self._specs) + + def __and__(self, other): + if isinstance(other, string_types): + other = SpecifierSet(other) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + specifier = SpecifierSet() + specifier._specs = frozenset(self._specs | other._specs) + + if self._prereleases is None and other._prereleases is not None: + specifier._prereleases = other._prereleases + elif self._prereleases is not None and other._prereleases is None: + specifier._prereleases = self._prereleases + elif self._prereleases == other._prereleases: + specifier._prereleases = self._prereleases + else: + raise ValueError( + "Cannot combine SpecifierSets with True and False prerelease " + "overrides." 
+            )
+
+        return specifier
+
+    def __eq__(self, other):
+        if isinstance(other, string_types):
+            other = SpecifierSet(other)
+        elif isinstance(other, _IndividualSpecifier):
+            other = SpecifierSet(str(other))
+        elif not isinstance(other, SpecifierSet):
+            return NotImplemented
+
+        return self._specs == other._specs
+
+    def __ne__(self, other):
+        if isinstance(other, string_types):
+            other = SpecifierSet(other)
+        elif isinstance(other, _IndividualSpecifier):
+            other = SpecifierSet(str(other))
+        elif not isinstance(other, SpecifierSet):
+            return NotImplemented
+
+        return self._specs != other._specs
+
+    @property
+    def prereleases(self):
+        # If we have been given an explicit prerelease modifier, then we'll
+        # pass that through here.
+        if self._prereleases is not None:
+            return self._prereleases
+
+        # Otherwise we'll see if any of the given specifiers accept
+        # prereleases, if any of them do we'll return True, otherwise False.
+        # Note: The use of any() here means that an empty set of specifiers
+        #       will always return False, this is an explicit design decision.
+        return any(s.prereleases for s in self._specs)
+
+    @prereleases.setter
+    def prereleases(self, value):
+        self._prereleases = value
+
+    def contains(self, item, prereleases=None):
+        # Ensure that our item is a Version or LegacyVersion instance.
+        if not isinstance(item, (LegacyVersion, Version)):
+            item = parse(item)
+
+        # We can determine if we're going to allow pre-releases by looking to
+        # see if any of the underlying items supports them. If none of them do
+        # and this item is a pre-release then we do not allow it and we can
+        # short circuit that here.
+        # Note: This means that 1.0.dev1 would not be contained in something
+        #       like >=1.0.devabc; however, it would be in
+        #       >=1.0.devabc,>0.0.dev0
+        if (not (self.prereleases or prereleases)) and item.is_prerelease:
+            return False
+
+        # Determine if we're forcing a prerelease or not, we bypass
+        # self.prereleases here and use self._prereleases because we want to
+        # only take into consideration actual *forced* values. The underlying
+        # specifiers will handle the other logic.
+        # The logic here is: If prereleases is anything but None, we'll just
+        #                    go ahead and continue to use that. However if
+        #                    prereleases is None, then we'll use whatever the
+        #                    value of self._prereleases is as long as it is not
+        #                    None itself.
+        if prereleases is None and self._prereleases is not None:
+            prereleases = self._prereleases
+
+        # We simply dispatch to the underlying specs here to make sure that the
+        # given version is contained within all of them.
+        # Note: This use of all() here means that an empty set of specifiers
+        #       will always return True, this is an explicit design decision.
+        return all(
+            s.contains(item, prereleases=prereleases)
+            for s in self._specs
+        )
+
+    def filter(self, iterable, prereleases=None):
+        # Determine if we're forcing a prerelease or not, we bypass
+        # self.prereleases here and use self._prereleases because we want to
+        # only take into consideration actual *forced* values. The underlying
+        # specifiers will handle the other logic.
+        # The logic here is: If prereleases is anything but None, we'll just
+        #                    go ahead and continue to use that. However if
+        #                    prereleases is None, then we'll use whatever the
+        #                    value of self._prereleases is as long as it is not
+        #                    None itself.
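+        # Usage sketch (illustrative, not from the vendored source): because
+        # each specifier's filter() wraps the previous iterable, the set
+        # behaves as a logical AND, e.g.
+        #   list(SpecifierSet(">=1.0,<2.0").filter(["0.9", "1.5", "2.1"]))
+        # evaluates to ["1.5"].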
+        if prereleases is None and self._prereleases is not None:
+            prereleases = self._prereleases
+
+        # If we have any specifiers, then we want to wrap our iterable in the
+        # filter method for each one, this will act as a logical AND amongst
+        # each specifier.
+        if self._specs:
+            for spec in self._specs:
+                iterable = spec.filter(iterable, prereleases=prereleases)
+            return iterable
+        # If we do not have any specifiers, then we need to have a rough filter
+        # which will filter out any pre-releases, unless there are no final
+        # releases, and which will filter out LegacyVersion in general.
+        else:
+            filtered = []
+            found_prereleases = []
+
+            for item in iterable:
+                # Ensure that we have some kind of Version class for this item.
+                if not isinstance(item, (LegacyVersion, Version)):
+                    parsed_version = parse(item)
+                else:
+                    parsed_version = item
+
+                # Filter out any item which is parsed as a LegacyVersion
+                if isinstance(parsed_version, LegacyVersion):
+                    continue
+
+                # Store any item which is a pre-release for later unless we've
+                # already found a final version or we are accepting prereleases
+                if parsed_version.is_prerelease and not prereleases:
+                    if not filtered:
+                        found_prereleases.append(item)
+                else:
+                    filtered.append(item)
+
+            # If we've found no items except for pre-releases, then we'll go
+            # ahead and use the pre-releases
+            if not filtered and found_prereleases and prereleases is None:
+                return found_prereleases
+
+            return filtered
diff --git a/awx/lib/site-packages/pkg_resources/_vendor/packaging/version.py b/awx/lib/site-packages/pkg_resources/_vendor/packaging/version.py
new file mode 100644
index 0000000000..cf8afb16d6
--- /dev/null
+++ b/awx/lib/site-packages/pkg_resources/_vendor/packaging/version.py
@@ -0,0 +1,401 @@
+# Copyright 2014 Donald Stufft
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import absolute_import, division, print_function
+
+import collections
+import itertools
+import re
+
+from ._structures import Infinity
+
+
+__all__ = [
+    "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"
+]
+
+
+_Version = collections.namedtuple(
+    "_Version",
+    ["epoch", "release", "dev", "pre", "post", "local"],
+)
+
+
+def parse(version):
+    """
+    Parse the given version string and return either a :class:`Version` object
+    or a :class:`LegacyVersion` object depending on if the given version is
+    a valid PEP 440 version or a legacy version.
+    """
+    try:
+        return Version(version)
+    except InvalidVersion:
+        return LegacyVersion(version)
+
+
+class InvalidVersion(ValueError):
+    """
+    An invalid version was found; users should refer to PEP 440.
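+
+    For example, ``Version("french toast")`` raises this exception, whereas
+    ``parse("french toast")`` falls back to returning a ``LegacyVersion``.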
+ """ + + +class _BaseVersion(object): + + def __hash__(self): + return hash(self._key) + + def __lt__(self, other): + return self._compare(other, lambda s, o: s < o) + + def __le__(self, other): + return self._compare(other, lambda s, o: s <= o) + + def __eq__(self, other): + return self._compare(other, lambda s, o: s == o) + + def __ge__(self, other): + return self._compare(other, lambda s, o: s >= o) + + def __gt__(self, other): + return self._compare(other, lambda s, o: s > o) + + def __ne__(self, other): + return self._compare(other, lambda s, o: s != o) + + def _compare(self, other, method): + if not isinstance(other, _BaseVersion): + return NotImplemented + + return method(self._key, other._key) + + +class LegacyVersion(_BaseVersion): + + def __init__(self, version): + self._version = str(version) + self._key = _legacy_cmpkey(self._version) + + def __str__(self): + return self._version + + def __repr__(self): + return "<LegacyVersion({0})>".format(repr(str(self))) + + @property + def public(self): + return self._version + + @property + def base_version(self): + return self._version + + @property + def local(self): + return None + + @property + def is_prerelease(self): + return False + + @property + def is_postrelease(self): + return False + + +_legacy_version_component_re = re.compile( + r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE, +) + +_legacy_version_replacement_map = { + "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@", +} + + +def _parse_version_parts(s): + for part in _legacy_version_component_re.split(s): + part = _legacy_version_replacement_map.get(part, part) + + if not part or part == ".": + continue + + if part[:1] in "0123456789": + # pad for numeric comparison + yield part.zfill(8) + else: + yield "*" + part + + # ensure that alpha/beta/candidate are before final + yield "*final" + + +def _legacy_cmpkey(version): + # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch + # greater than or equal to 0. This will effectively put the LegacyVersion, + # which uses the defacto standard originally implemented by setuptools, + # as before all PEP 440 versions. + epoch = -1 + + # This scheme is taken from pkg_resources.parse_version setuptools prior to + # it's adoption of the packaging library. + parts = [] + for part in _parse_version_parts(version.lower()): + if part.startswith("*"): + # remove "-" before a prerelease tag + if part < "*final": + while parts and parts[-1] == "*final-": + parts.pop() + + # remove trailing zeros from each series of numeric parts + while parts and parts[-1] == "00000000": + parts.pop() + + parts.append(part) + parts = tuple(parts) + + return epoch, parts + +# Deliberately not anchored to the start and end of the string, to make it +# easier for 3rd party code to reuse +VERSION_PATTERN = r""" + v? + (?: + (?:(?P<epoch>[0-9]+)!)? # epoch + (?P<release>[0-9]+(?:\.[0-9]+)*) # release segment + (?P<pre> # pre-release + [-_\.]? + (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview)) + [-_\.]? + (?P<pre_n>[0-9]+)? + )? + (?P<post> # post release + (?:-(?P<post_n1>[0-9]+)) + | + (?: + [-_\.]? + (?P<post_l>post|rev|r) + [-_\.]? + (?P<post_n2>[0-9]+)? + ) + )? + (?P<dev> # dev release + [-_\.]? + (?P<dev_l>dev) + [-_\.]? + (?P<dev_n>[0-9]+)? + )? + ) + (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? 
# local version +""" + + +class Version(_BaseVersion): + + _regex = re.compile( + r"^\s*" + VERSION_PATTERN + r"\s*$", + re.VERBOSE | re.IGNORECASE, + ) + + def __init__(self, version): + # Validate the version and parse it into pieces + match = self._regex.search(version) + if not match: + raise InvalidVersion("Invalid version: '{0}'".format(version)) + + # Store the parsed out pieces of the version + self._version = _Version( + epoch=int(match.group("epoch")) if match.group("epoch") else 0, + release=tuple(int(i) for i in match.group("release").split(".")), + pre=_parse_letter_version( + match.group("pre_l"), + match.group("pre_n"), + ), + post=_parse_letter_version( + match.group("post_l"), + match.group("post_n1") or match.group("post_n2"), + ), + dev=_parse_letter_version( + match.group("dev_l"), + match.group("dev_n"), + ), + local=_parse_local_version(match.group("local")), + ) + + # Generate a key which will be used for sorting + self._key = _cmpkey( + self._version.epoch, + self._version.release, + self._version.pre, + self._version.post, + self._version.dev, + self._version.local, + ) + + def __repr__(self): + return "<Version({0})>".format(repr(str(self))) + + def __str__(self): + parts = [] + + # Epoch + if self._version.epoch != 0: + parts.append("{0}!".format(self._version.epoch)) + + # Release segment + parts.append(".".join(str(x) for x in self._version.release)) + + # Pre-release + if self._version.pre is not None: + parts.append("".join(str(x) for x in self._version.pre)) + + # Post-release + if self._version.post is not None: + parts.append(".post{0}".format(self._version.post[1])) + + # Development release + if self._version.dev is not None: + parts.append(".dev{0}".format(self._version.dev[1])) + + # Local version segment + if self._version.local is not None: + parts.append( + "+{0}".format(".".join(str(x) for x in self._version.local)) + ) + + return "".join(parts) + + @property + def public(self): + return str(self).split("+", 1)[0] + + @property + def base_version(self): + parts = [] + + # Epoch + if self._version.epoch != 0: + parts.append("{0}!".format(self._version.epoch)) + + # Release segment + parts.append(".".join(str(x) for x in self._version.release)) + + return "".join(parts) + + @property + def local(self): + version_string = str(self) + if "+" in version_string: + return version_string.split("+", 1)[1] + + @property + def is_prerelease(self): + return bool(self._version.dev or self._version.pre) + + @property + def is_postrelease(self): + return bool(self._version.post) + + +def _parse_letter_version(letter, number): + if letter: + # We consider there to be an implicit 0 in a pre-release if there is + # not a numeral associated with it. + if number is None: + number = 0 + + # We normalize any letters to their lower case form + letter = letter.lower() + + # We consider some words to be alternate spellings of other words and + # in those cases we want to normalize the spellings to our preferred + # spelling. + if letter == "alpha": + letter = "a" + elif letter == "beta": + letter = "b" + elif letter in ["c", "pre", "preview"]: + letter = "rc" + + return letter, int(number) + if not letter and number: + # We assume if we are given a number, but we are not given a letter + # then this is using the implicit post release syntax (e.g. 1.0-1) + letter = "post" + + return letter, int(number) + + +_local_version_seperators = re.compile(r"[\._-]") + + +def _parse_local_version(local): + """ + Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve"). 
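+
+    Separators may be ".", "_" or "-", and alphabetic parts are lowercased,
+    so "ABC_1-twelve" yields the same tuple.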
+ """ + if local is not None: + return tuple( + part.lower() if not part.isdigit() else int(part) + for part in _local_version_seperators.split(local) + ) + + +def _cmpkey(epoch, release, pre, post, dev, local): + # When we compare a release version, we want to compare it with all of the + # trailing zeros removed. So we'll use a reverse the list, drop all the now + # leading zeros until we come to something non zero, then take the rest + # re-reverse it back into the correct order and make it a tuple and use + # that for our sorting key. + release = tuple( + reversed(list( + itertools.dropwhile( + lambda x: x == 0, + reversed(release), + ) + )) + ) + + # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0. + # We'll do this by abusing the pre segment, but we _only_ want to do this + # if there is not a pre or a post segment. If we have one of those then + # the normal sorting rules will handle this case correctly. + if pre is None and post is None and dev is not None: + pre = -Infinity + # Versions without a pre-release (except as noted above) should sort after + # those with one. + elif pre is None: + pre = Infinity + + # Versions without a post segment should sort before those with one. + if post is None: + post = -Infinity + + # Versions without a development segment should sort after those with one. + if dev is None: + dev = Infinity + + if local is None: + # Versions without a local segment should sort before those with one. + local = -Infinity + else: + # Versions with a local segment need that segment parsed to implement + # the sorting rules in PEP440. + # - Alpha numeric segments sort before numeric segments + # - Alpha numeric segments sort lexicographically + # - Numeric segments sort numerically + # - Shorter versions sort before longer versions when the prefixes + # match exactly + local = tuple( + (i, "") if isinstance(i, int) else (-Infinity, i) + for i in local + ) + + return epoch, release, pre, post, dev, local diff --git a/awx/lib/site-packages/pkg_resources/_vendor/vendored.txt b/awx/lib/site-packages/pkg_resources/_vendor/vendored.txt new file mode 100644 index 0000000000..75a31670c6 --- /dev/null +++ b/awx/lib/site-packages/pkg_resources/_vendor/vendored.txt @@ -0,0 +1 @@ +packaging==15.0 diff --git a/awx/lib/site-packages/pkg_resources/api_tests.txt b/awx/lib/site-packages/pkg_resources/api_tests.txt new file mode 100644 index 0000000000..a6c25a378f --- /dev/null +++ b/awx/lib/site-packages/pkg_resources/api_tests.txt @@ -0,0 +1,419 @@ +Pluggable Distributions of Python Software +========================================== + +Distributions +------------- + +A "Distribution" is a collection of files that represent a "Release" of a +"Project" as of a particular point in time, denoted by a +"Version":: + + >>> import sys, pkg_resources + >>> from pkg_resources import Distribution + >>> Distribution(project_name="Foo", version="1.2") + Foo 1.2 + +Distributions have a location, which can be a filename, URL, or really anything +else you care to use:: + + >>> dist = Distribution( + ... location="http://example.com/something", + ... project_name="Bar", version="0.9" + ... 
) + + >>> dist + Bar 0.9 (http://example.com/something) + + +Distributions have various introspectable attributes:: + + >>> dist.location + 'http://example.com/something' + + >>> dist.project_name + 'Bar' + + >>> dist.version + '0.9' + + >>> dist.py_version == sys.version[:3] + True + + >>> print(dist.platform) + None + +Including various computed attributes:: + + >>> from pkg_resources import parse_version + >>> dist.parsed_version == parse_version(dist.version) + True + + >>> dist.key # case-insensitive form of the project name + 'bar' + +Distributions are compared (and hashed) by version first:: + + >>> Distribution(version='1.0') == Distribution(version='1.0') + True + >>> Distribution(version='1.0') == Distribution(version='1.1') + False + >>> Distribution(version='1.0') < Distribution(version='1.1') + True + +but also by project name (case-insensitive), platform, Python version, +location, etc.:: + + >>> Distribution(project_name="Foo",version="1.0") == \ + ... Distribution(project_name="Foo",version="1.0") + True + + >>> Distribution(project_name="Foo",version="1.0") == \ + ... Distribution(project_name="foo",version="1.0") + True + + >>> Distribution(project_name="Foo",version="1.0") == \ + ... Distribution(project_name="Foo",version="1.1") + False + + >>> Distribution(project_name="Foo",py_version="2.3",version="1.0") == \ + ... Distribution(project_name="Foo",py_version="2.4",version="1.0") + False + + >>> Distribution(location="spam",version="1.0") == \ + ... Distribution(location="spam",version="1.0") + True + + >>> Distribution(location="spam",version="1.0") == \ + ... Distribution(location="baz",version="1.0") + False + + + +Hash and compare distribution by prio/plat + +Get version from metadata +provider capabilities +egg_name() +as_requirement() +from_location, from_filename (w/path normalization) + +Releases may have zero or more "Requirements", which indicate +what releases of another project the release requires in order to +function. A Requirement names the other project, expresses some criteria +as to what releases of that project are acceptable, and lists any "Extras" +that the requiring release may need from that project. (An Extra is an +optional feature of a Release, that can only be used if its additional +Requirements are satisfied.) + + + +The Working Set +--------------- + +A collection of active distributions is called a Working Set. Note that a +Working Set can contain any importable distribution, not just pluggable ones. +For example, the Python standard library is an importable distribution that +will usually be part of the Working Set, even though it is not pluggable. +Similarly, when you are doing development work on a project, the files you are +editing are also a Distribution. (And, with a little attention to the +directory names used, and including some additional metadata, such a +"development distribution" can be made pluggable as well.) + + >>> from pkg_resources import WorkingSet + +A working set's entries are the sys.path entries that correspond to the active +distributions. 
By default, the working set's entries are the items on +``sys.path``:: + + >>> ws = WorkingSet() + >>> ws.entries == sys.path + True + +But you can also create an empty working set explicitly, and add distributions +to it:: + + >>> ws = WorkingSet([]) + >>> ws.add(dist) + >>> ws.entries + ['http://example.com/something'] + >>> dist in ws + True + >>> Distribution('foo',version="") in ws + False + +And you can iterate over its distributions:: + + >>> list(ws) + [Bar 0.9 (http://example.com/something)] + +Adding the same distribution more than once is a no-op:: + + >>> ws.add(dist) + >>> list(ws) + [Bar 0.9 (http://example.com/something)] + +For that matter, adding multiple distributions for the same project also does +nothing, because a working set can only hold one active distribution per +project -- the first one added to it:: + + >>> ws.add( + ... Distribution( + ... 'http://example.com/something', project_name="Bar", + ... version="7.2" + ... ) + ... ) + >>> list(ws) + [Bar 0.9 (http://example.com/something)] + +You can append a path entry to a working set using ``add_entry()``:: + + >>> ws.entries + ['http://example.com/something'] + >>> ws.add_entry(pkg_resources.__file__) + >>> ws.entries + ['http://example.com/something', '...pkg_resources...'] + +Multiple additions result in multiple entries, even if the entry is already in +the working set (because ``sys.path`` can contain the same entry more than +once):: + + >>> ws.add_entry(pkg_resources.__file__) + >>> ws.entries + ['...example.com...', '...pkg_resources...', '...pkg_resources...'] + +And you can specify the path entry a distribution was found under, using the +optional second parameter to ``add()``:: + + >>> ws = WorkingSet([]) + >>> ws.add(dist,"foo") + >>> ws.entries + ['foo'] + +But even if a distribution is found under multiple path entries, it still only +shows up once when iterating the working set: + + >>> ws.add_entry(ws.entries[0]) + >>> list(ws) + [Bar 0.9 (http://example.com/something)] + +You can ask a WorkingSet to ``find()`` a distribution matching a requirement:: + + >>> from pkg_resources import Requirement + >>> print(ws.find(Requirement.parse("Foo==1.0"))) # no match, return None + None + + >>> ws.find(Requirement.parse("Bar==0.9")) # match, return distribution + Bar 0.9 (http://example.com/something) + +Note that asking for a conflicting version of a distribution already in a +working set triggers a ``pkg_resources.VersionConflict`` error: + + >>> try: + ... ws.find(Requirement.parse("Bar==1.0")) + ... except pkg_resources.VersionConflict as exc: + ... print(str(exc)) + ... else: + ... raise AssertionError("VersionConflict was not raised") + (Bar 0.9 (http://example.com/something), Requirement.parse('Bar==1.0')) + +You can subscribe a callback function to receive notifications whenever a new +distribution is added to a working set. 
The callback is immediately invoked +once for each existing distribution in the working set, and then is called +again for new distributions added thereafter:: + + >>> def added(dist): print("Added %s" % dist) + >>> ws.subscribe(added) + Added Bar 0.9 + >>> foo12 = Distribution(project_name="Foo", version="1.2", location="f12") + >>> ws.add(foo12) + Added Foo 1.2 + +Note, however, that only the first distribution added for a given project name +will trigger a callback, even during the initial ``subscribe()`` callback:: + + >>> foo14 = Distribution(project_name="Foo", version="1.4", location="f14") + >>> ws.add(foo14) # no callback, because Foo 1.2 is already active + + >>> ws = WorkingSet([]) + >>> ws.add(foo12) + >>> ws.add(foo14) + >>> ws.subscribe(added) + Added Foo 1.2 + +And adding a callback more than once has no effect, either:: + + >>> ws.subscribe(added) # no callbacks + + # and no double-callbacks on subsequent additions, either + >>> just_a_test = Distribution(project_name="JustATest", version="0.99") + >>> ws.add(just_a_test) + Added JustATest 0.99 + + +Finding Plugins +--------------- + +``WorkingSet`` objects can be used to figure out what plugins in an +``Environment`` can be loaded without any resolution errors:: + + >>> from pkg_resources import Environment + + >>> plugins = Environment([]) # normally, a list of plugin directories + >>> plugins.add(foo12) + >>> plugins.add(foo14) + >>> plugins.add(just_a_test) + +In the simplest case, we just get the newest version of each distribution in +the plugin environment:: + + >>> ws = WorkingSet([]) + >>> ws.find_plugins(plugins) + ([JustATest 0.99, Foo 1.4 (f14)], {}) + +But if there's a problem with a version conflict or missing requirements, the +method falls back to older versions, and the error info dict will contain an +exception instance for each unloadable plugin:: + + >>> ws.add(foo12) # this will conflict with Foo 1.4 + >>> ws.find_plugins(plugins) + ([JustATest 0.99, Foo 1.2 (f12)], {Foo 1.4 (f14): VersionConflict(...)}) + +But if you disallow fallbacks, the failed plugin will be skipped instead of +trying older versions:: + + >>> ws.find_plugins(plugins, fallback=False) + ([JustATest 0.99], {Foo 1.4 (f14): VersionConflict(...)}) + + + +Platform Compatibility Rules +---------------------------- + +On the Mac, there are potential compatibility issues for modules compiled +on newer versions of Mac OS X than what the user is running. Additionally, +Mac OS X will soon have two platforms to contend with: Intel and PowerPC. + +Basic equality works as on other platforms:: + + >>> from pkg_resources import compatible_platforms as cp + >>> reqd = 'macosx-10.4-ppc' + >>> cp(reqd, reqd) + True + >>> cp("win32", reqd) + False + +Distributions made on other machine types are not compatible:: + + >>> cp("macosx-10.4-i386", reqd) + False + +Distributions made on earlier versions of the OS are compatible, as +long as they are from the same top-level version. 
The patchlevel version +number does not matter:: + + >>> cp("macosx-10.4-ppc", reqd) + True + >>> cp("macosx-10.3-ppc", reqd) + True + >>> cp("macosx-10.5-ppc", reqd) + False + >>> cp("macosx-9.5-ppc", reqd) + False + +Backwards compatibility for packages made via earlier versions of +setuptools is provided as well:: + + >>> cp("darwin-8.2.0-Power_Macintosh", reqd) + True + >>> cp("darwin-7.2.0-Power_Macintosh", reqd) + True + >>> cp("darwin-8.2.0-Power_Macintosh", "macosx-10.3-ppc") + False + + +Environment Markers +------------------- + + >>> from pkg_resources import invalid_marker as im, evaluate_marker as em + >>> import os + + >>> print(im("sys_platform")) + Comparison or logical expression expected + + >>> print(im("sys_platform==")) + invalid syntax + + >>> print(im("sys_platform=='win32'")) + False + + >>> print(im("sys=='x'")) + Unknown name 'sys' + + >>> print(im("(extra)")) + Comparison or logical expression expected + + >>> print(im("(extra")) + invalid syntax + + >>> print(im("os.open('foo')=='y'")) + Language feature not supported in environment markers + + >>> print(im("'x'=='y' and os.open('foo')=='y'")) # no short-circuit! + Language feature not supported in environment markers + + >>> print(im("'x'=='x' or os.open('foo')=='y'")) # no short-circuit! + Language feature not supported in environment markers + + >>> print(im("'x' < 'y'")) + '<' operator not allowed in environment markers + + >>> print(im("'x' < 'y' < 'z'")) + Chained comparison not allowed in environment markers + + >>> print(im("r'x'=='x'")) + Only plain strings allowed in environment markers + + >>> print(im("'''x'''=='x'")) + Only plain strings allowed in environment markers + + >>> print(im('"""x"""=="x"')) + Only plain strings allowed in environment markers + + >>> print(im(r"'x\n'=='x'")) + Only plain strings allowed in environment markers + + >>> print(im("os.open=='y'")) + Language feature not supported in environment markers + + >>> em('"x"=="x"') + True + + >>> em('"x"=="y"') + False + + >>> em('"x"=="y" and "x"=="x"') + False + + >>> em('"x"=="y" or "x"=="x"') + True + + >>> em('"x"=="y" and "x"=="q" or "z"=="z"') + True + + >>> em('"x"=="y" and ("x"=="q" or "z"=="z")') + False + + >>> em('"x"=="y" and "z"=="z" or "x"=="q"') + False + + >>> em('"x"=="x" and "z"=="z" or "x"=="q"') + True + + >>> em("sys_platform=='win32'") == (sys.platform=='win32') + True + + >>> em("'x' in 'yx'") + True + + >>> em("'yx' in 'x'") + False diff --git a/awx/lib/site-packages/pkg_resources/tests/__init__.py b/awx/lib/site-packages/pkg_resources/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/lib/site-packages/pkg_resources/tests/test_pkg_resources.py b/awx/lib/site-packages/pkg_resources/tests/test_pkg_resources.py new file mode 100644 index 0000000000..564d7cec4f --- /dev/null +++ b/awx/lib/site-packages/pkg_resources/tests/test_pkg_resources.py @@ -0,0 +1,111 @@ +import sys +import tempfile +import os +import zipfile +import datetime +import time +import subprocess + +import pkg_resources + +try: + unicode +except NameError: + unicode = str + +def timestamp(dt): + """ + Return a timestamp for a local, naive datetime instance. 
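+
+    Uses datetime.timestamp() where available (Python 3.3+), falling back
+    to time.mktime() on older interpreters.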
+ """ + try: + return dt.timestamp() + except AttributeError: + # Python 3.2 and earlier + return time.mktime(dt.timetuple()) + +class EggRemover(unicode): + def __call__(self): + if self in sys.path: + sys.path.remove(self) + if os.path.exists(self): + os.remove(self) + +class TestZipProvider(object): + finalizers = [] + + ref_time = datetime.datetime(2013, 5, 12, 13, 25, 0) + "A reference time for a file modification" + + @classmethod + def setup_class(cls): + "create a zip egg and add it to sys.path" + egg = tempfile.NamedTemporaryFile(suffix='.egg', delete=False) + zip_egg = zipfile.ZipFile(egg, 'w') + zip_info = zipfile.ZipInfo() + zip_info.filename = 'mod.py' + zip_info.date_time = cls.ref_time.timetuple() + zip_egg.writestr(zip_info, 'x = 3\n') + zip_info = zipfile.ZipInfo() + zip_info.filename = 'data.dat' + zip_info.date_time = cls.ref_time.timetuple() + zip_egg.writestr(zip_info, 'hello, world!') + zip_egg.close() + egg.close() + + sys.path.append(egg.name) + cls.finalizers.append(EggRemover(egg.name)) + + @classmethod + def teardown_class(cls): + for finalizer in cls.finalizers: + finalizer() + + def test_resource_filename_rewrites_on_change(self): + """ + If a previous call to get_resource_filename has saved the file, but + the file has been subsequently mutated with different file of the + same size and modification time, it should not be overwritten on a + subsequent call to get_resource_filename. + """ + import mod + manager = pkg_resources.ResourceManager() + zp = pkg_resources.ZipProvider(mod) + filename = zp.get_resource_filename(manager, 'data.dat') + actual = datetime.datetime.fromtimestamp(os.stat(filename).st_mtime) + assert actual == self.ref_time + f = open(filename, 'w') + f.write('hello, world?') + f.close() + ts = timestamp(self.ref_time) + os.utime(filename, (ts, ts)) + filename = zp.get_resource_filename(manager, 'data.dat') + f = open(filename) + assert f.read() == 'hello, world!' + manager.cleanup_resources() + +class TestResourceManager(object): + def test_get_cache_path(self): + mgr = pkg_resources.ResourceManager() + path = mgr.get_cache_path('foo') + type_ = str(type(path)) + message = "Unexpected type from get_cache_path: " + type_ + assert isinstance(path, (unicode, str)), message + + +class TestIndependence: + """ + Tests to ensure that pkg_resources runs independently from setuptools. + """ + def test_setuptools_not_imported(self): + """ + In a separate Python environment, import pkg_resources and assert + that action doesn't cause setuptools to be imported. 
+ """ + lines = ( + 'import pkg_resources', + 'import sys', + 'assert "setuptools" not in sys.modules, ' + '"setuptools was imported"', + ) + cmd = [sys.executable, '-c', '; '.join(lines)] + subprocess.check_call(cmd) diff --git a/awx/lib/site-packages/pkg_resources/tests/test_resources.py b/awx/lib/site-packages/pkg_resources/tests/test_resources.py new file mode 100644 index 0000000000..a55478a249 --- /dev/null +++ b/awx/lib/site-packages/pkg_resources/tests/test_resources.py @@ -0,0 +1,661 @@ +import os +import sys +import tempfile +import shutil +import string + +import pytest + +import pkg_resources +from pkg_resources import (parse_requirements, VersionConflict, parse_version, + Distribution, EntryPoint, Requirement, safe_version, safe_name, + WorkingSet) + +packaging = pkg_resources.packaging + + +def safe_repr(obj, short=False): + """ copied from Python2.7""" + try: + result = repr(obj) + except Exception: + result = object.__repr__(obj) + if not short or len(result) < pkg_resources._MAX_LENGTH: + return result + return result[:pkg_resources._MAX_LENGTH] + ' [truncated]...' + + +class Metadata(pkg_resources.EmptyProvider): + """Mock object to return metadata as if from an on-disk distribution""" + + def __init__(self, *pairs): + self.metadata = dict(pairs) + + def has_metadata(self, name): + return name in self.metadata + + def get_metadata(self, name): + return self.metadata[name] + + def get_metadata_lines(self, name): + return pkg_resources.yield_lines(self.get_metadata(name)) + + +dist_from_fn = pkg_resources.Distribution.from_filename + +class TestDistro: + + def testCollection(self): + # empty path should produce no distributions + ad = pkg_resources.Environment([], platform=None, python=None) + assert list(ad) == [] + assert ad['FooPkg'] == [] + ad.add(dist_from_fn("FooPkg-1.3_1.egg")) + ad.add(dist_from_fn("FooPkg-1.4-py2.4-win32.egg")) + ad.add(dist_from_fn("FooPkg-1.2-py2.4.egg")) + + # Name is in there now + assert ad['FooPkg'] + # But only 1 package + assert list(ad) == ['foopkg'] + + # Distributions sort by version + assert [dist.version for dist in ad['FooPkg']] == ['1.4','1.3-1','1.2'] + + # Removing a distribution leaves sequence alone + ad.remove(ad['FooPkg'][1]) + assert [dist.version for dist in ad['FooPkg']] == ['1.4','1.2'] + + # And inserting adds them in order + ad.add(dist_from_fn("FooPkg-1.9.egg")) + assert [dist.version for dist in ad['FooPkg']] == ['1.9','1.4','1.2'] + + ws = WorkingSet([]) + foo12 = dist_from_fn("FooPkg-1.2-py2.4.egg") + foo14 = dist_from_fn("FooPkg-1.4-py2.4-win32.egg") + req, = parse_requirements("FooPkg>=1.3") + + # Nominal case: no distros on path, should yield all applicable + assert ad.best_match(req, ws).version == '1.9' + # If a matching distro is already installed, should return only that + ws.add(foo14) + assert ad.best_match(req, ws).version == '1.4' + + # If the first matching distro is unsuitable, it's a version conflict + ws = WorkingSet([]) + ws.add(foo12) + ws.add(foo14) + with pytest.raises(VersionConflict): + ad.best_match(req, ws) + + # If more than one match on the path, the first one takes precedence + ws = WorkingSet([]) + ws.add(foo14) + ws.add(foo12) + ws.add(foo14) + assert ad.best_match(req, ws).version == '1.4' + + def checkFooPkg(self,d): + assert d.project_name == "FooPkg" + assert d.key == "foopkg" + assert d.version == "1.3.post1" + assert d.py_version == "2.4" + assert d.platform == "win32" + assert d.parsed_version == parse_version("1.3-1") + + def testDistroBasics(self): + d = Distribution( + 
"/some/path", + project_name="FooPkg",version="1.3-1",py_version="2.4",platform="win32" + ) + self.checkFooPkg(d) + + d = Distribution("/some/path") + assert d.py_version == sys.version[:3] + assert d.platform == None + + def testDistroParse(self): + d = dist_from_fn("FooPkg-1.3.post1-py2.4-win32.egg") + self.checkFooPkg(d) + d = dist_from_fn("FooPkg-1.3.post1-py2.4-win32.egg-info") + self.checkFooPkg(d) + + def testDistroMetadata(self): + d = Distribution( + "/some/path", project_name="FooPkg", py_version="2.4", platform="win32", + metadata = Metadata( + ('PKG-INFO',"Metadata-Version: 1.0\nVersion: 1.3-1\n") + ) + ) + self.checkFooPkg(d) + + def distRequires(self, txt): + return Distribution("/foo", metadata=Metadata(('depends.txt', txt))) + + def checkRequires(self, dist, txt, extras=()): + assert list(dist.requires(extras)) == list(parse_requirements(txt)) + + def testDistroDependsSimple(self): + for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0": + self.checkRequires(self.distRequires(v), v) + + def testResolve(self): + ad = pkg_resources.Environment([]) + ws = WorkingSet([]) + # Resolving no requirements -> nothing to install + assert list(ws.resolve([], ad)) == [] + # Request something not in the collection -> DistributionNotFound + with pytest.raises(pkg_resources.DistributionNotFound): + ws.resolve(parse_requirements("Foo"), ad) + + Foo = Distribution.from_filename( + "/foo_dir/Foo-1.2.egg", + metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0")) + ) + ad.add(Foo) + ad.add(Distribution.from_filename("Foo-0.9.egg")) + + # Request thing(s) that are available -> list to activate + for i in range(3): + targets = list(ws.resolve(parse_requirements("Foo"), ad)) + assert targets == [Foo] + list(map(ws.add,targets)) + with pytest.raises(VersionConflict): + ws.resolve(parse_requirements("Foo==0.9"), ad) + ws = WorkingSet([]) # reset + + # Request an extra that causes an unresolved dependency for "Baz" + with pytest.raises(pkg_resources.DistributionNotFound): + ws.resolve(parse_requirements("Foo[bar]"), ad) + Baz = Distribution.from_filename( + "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo")) + ) + ad.add(Baz) + + # Activation list now includes resolved dependency + assert list(ws.resolve(parse_requirements("Foo[bar]"), ad)) ==[Foo,Baz] + # Requests for conflicting versions produce VersionConflict + with pytest.raises(VersionConflict) as vc: + ws.resolve(parse_requirements("Foo==1.2\nFoo!=1.2"), ad) + + msg = 'Foo 0.9 is installed but Foo==1.2 is required' + assert vc.value.report() == msg + + def testDistroDependsOptions(self): + d = self.distRequires(""" + Twisted>=1.5 + [docgen] + ZConfig>=2.0 + docutils>=0.3 + [fastcgi] + fcgiapp>=0.1""") + self.checkRequires(d,"Twisted>=1.5") + self.checkRequires( + d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"] + ) + self.checkRequires( + d,"Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"] + ) + self.checkRequires( + d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(), + ["docgen","fastcgi"] + ) + self.checkRequires( + d,"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(), + ["fastcgi", "docgen"] + ) + with pytest.raises(pkg_resources.UnknownExtra): + d.requires(["foo"]) + + +class TestWorkingSet: + def test_find_conflicting(self): + ws = WorkingSet([]) + Foo = Distribution.from_filename("/foo_dir/Foo-1.2.egg") + ws.add(Foo) + + # create a requirement that conflicts with Foo 1.2 + req = next(parse_requirements("Foo<1.2")) + + with pytest.raises(VersionConflict) as vc: + ws.find(req) + + msg = 
'Foo 1.2 is installed but Foo<1.2 is required' + assert vc.value.report() == msg + + def test_resolve_conflicts_with_prior(self): + """ + A ContextualVersionConflict should be raised when a requirement + conflicts with a prior requirement for a different package. + """ + # Create installation where Foo depends on Baz 1.0 and Bar depends on + # Baz 2.0. + ws = WorkingSet([]) + md = Metadata(('depends.txt', "Baz==1.0")) + Foo = Distribution.from_filename("/foo_dir/Foo-1.0.egg", metadata=md) + ws.add(Foo) + md = Metadata(('depends.txt', "Baz==2.0")) + Bar = Distribution.from_filename("/foo_dir/Bar-1.0.egg", metadata=md) + ws.add(Bar) + Baz = Distribution.from_filename("/foo_dir/Baz-1.0.egg") + ws.add(Baz) + Baz = Distribution.from_filename("/foo_dir/Baz-2.0.egg") + ws.add(Baz) + + with pytest.raises(VersionConflict) as vc: + ws.resolve(parse_requirements("Foo\nBar\n")) + + msg = "Baz 1.0 is installed but Baz==2.0 is required by {'Bar'}" + if pkg_resources.PY2: + msg = msg.replace("{'Bar'}", "set(['Bar'])") + assert vc.value.report() == msg + + +class TestEntryPoints: + + def assertfields(self, ep): + assert ep.name == "foo" + assert ep.module_name == "pkg_resources.tests.test_resources" + assert ep.attrs == ("TestEntryPoints",) + assert ep.extras == ("x",) + assert ep.load() is TestEntryPoints + expect = "foo = pkg_resources.tests.test_resources:TestEntryPoints [x]" + assert str(ep) == expect + + def setup_method(self, method): + self.dist = Distribution.from_filename( + "FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt','[x]'))) + + def testBasics(self): + ep = EntryPoint( + "foo", "pkg_resources.tests.test_resources", ["TestEntryPoints"], + ["x"], self.dist + ) + self.assertfields(ep) + + def testParse(self): + s = "foo = pkg_resources.tests.test_resources:TestEntryPoints [x]" + ep = EntryPoint.parse(s, self.dist) + self.assertfields(ep) + + ep = EntryPoint.parse("bar baz= spammity[PING]") + assert ep.name == "bar baz" + assert ep.module_name == "spammity" + assert ep.attrs == () + assert ep.extras == ("ping",) + + ep = EntryPoint.parse(" fizzly = wocka:foo") + assert ep.name == "fizzly" + assert ep.module_name == "wocka" + assert ep.attrs == ("foo",) + assert ep.extras == () + + # plus in the name + spec = "html+mako = mako.ext.pygmentplugin:MakoHtmlLexer" + ep = EntryPoint.parse(spec) + assert ep.name == 'html+mako' + + reject_specs = "foo", "x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2" + @pytest.mark.parametrize("reject_spec", reject_specs) + def test_reject_spec(self, reject_spec): + with pytest.raises(ValueError): + EntryPoint.parse(reject_spec) + + def test_printable_name(self): + """ + Allow any printable character in the name. + """ + # Create a name with all printable characters; strip the whitespace. 
+ name = string.printable.strip() + spec = "{name} = module:attr".format(**locals()) + ep = EntryPoint.parse(spec) + assert ep.name == name + + def checkSubMap(self, m): + assert len(m) == len(self.submap_expect) + for key, ep in pkg_resources.iteritems(self.submap_expect): + assert repr(m.get(key)) == repr(ep) + + submap_expect = dict( + feature1=EntryPoint('feature1', 'somemodule', ['somefunction']), + feature2=EntryPoint('feature2', 'another.module', ['SomeClass'], ['extra1','extra2']), + feature3=EntryPoint('feature3', 'this.module', extras=['something']) + ) + submap_str = """ + # define features for blah blah + feature1 = somemodule:somefunction + feature2 = another.module:SomeClass [extra1,extra2] + feature3 = this.module [something] + """ + + def testParseList(self): + self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str)) + with pytest.raises(ValueError): + EntryPoint.parse_group("x a", "foo=bar") + with pytest.raises(ValueError): + EntryPoint.parse_group("x", ["foo=baz", "foo=bar"]) + + def testParseMap(self): + m = EntryPoint.parse_map({'xyz':self.submap_str}) + self.checkSubMap(m['xyz']) + assert list(m.keys()) == ['xyz'] + m = EntryPoint.parse_map("[xyz]\n"+self.submap_str) + self.checkSubMap(m['xyz']) + assert list(m.keys()) == ['xyz'] + with pytest.raises(ValueError): + EntryPoint.parse_map(["[xyz]", "[xyz]"]) + with pytest.raises(ValueError): + EntryPoint.parse_map(self.submap_str) + +class TestRequirements: + + def testBasics(self): + r = Requirement.parse("Twisted>=1.2") + assert str(r) == "Twisted>=1.2" + assert repr(r) == "Requirement.parse('Twisted>=1.2')" + assert r == Requirement("Twisted", [('>=','1.2')], ()) + assert r == Requirement("twisTed", [('>=','1.2')], ()) + assert r != Requirement("Twisted", [('>=','2.0')], ()) + assert r != Requirement("Zope", [('>=','1.2')], ()) + assert r != Requirement("Zope", [('>=','3.0')], ()) + assert r != Requirement.parse("Twisted[extras]>=1.2") + + def testOrdering(self): + r1 = Requirement("Twisted", [('==','1.2c1'),('>=','1.2')], ()) + r2 = Requirement("Twisted", [('>=','1.2'),('==','1.2c1')], ()) + assert r1 == r2 + assert str(r1) == str(r2) + assert str(r2) == "Twisted==1.2c1,>=1.2" + + def testBasicContains(self): + r = Requirement("Twisted", [('>=','1.2')], ()) + foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg") + twist11 = Distribution.from_filename("Twisted-1.1.egg") + twist12 = Distribution.from_filename("Twisted-1.2.egg") + assert parse_version('1.2') in r + assert parse_version('1.1') not in r + assert '1.2' in r + assert '1.1' not in r + assert foo_dist not in r + assert twist11 not in r + assert twist12 in r + + def testOptionsAndHashing(self): + r1 = Requirement.parse("Twisted[foo,bar]>=1.2") + r2 = Requirement.parse("Twisted[bar,FOO]>=1.2") + assert r1 == r2 + assert r1.extras == ("foo","bar") + assert r2.extras == ("bar","foo") # extras are normalized + assert hash(r1) == hash(r2) + assert ( + hash(r1) + == + hash(( + "twisted", + packaging.specifiers.SpecifierSet(">=1.2"), + frozenset(["foo","bar"]), + )) + ) + + def testVersionEquality(self): + r1 = Requirement.parse("foo==0.3a2") + r2 = Requirement.parse("foo!=0.3a4") + d = Distribution.from_filename + + assert d("foo-0.3a4.egg") not in r1 + assert d("foo-0.3a1.egg") not in r1 + assert d("foo-0.3a4.egg") not in r2 + + assert d("foo-0.3a2.egg") in r1 + assert d("foo-0.3a2.egg") in r2 + assert d("foo-0.3a3.egg") in r2 + assert d("foo-0.3a5.egg") in r2 + + def testSetuptoolsProjectName(self): + """ + The setuptools project should implement the 
setuptools package.
+        """
+
+        assert (
+            Requirement.parse('setuptools').project_name == 'setuptools')
+        # setuptools 0.7 and higher means setuptools.
+        assert (
+            Requirement.parse('setuptools == 0.7').project_name == 'setuptools')
+        assert (
+            Requirement.parse('setuptools == 0.7a1').project_name == 'setuptools')
+        assert (
+            Requirement.parse('setuptools >= 0.7').project_name == 'setuptools')
+
+
+class TestParsing:
+
+    def testEmptyParse(self):
+        assert list(parse_requirements('')) == []
+
+    def testYielding(self):
+        for inp,out in [
+            ([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']),
+            (['x\n\n','y'], ['x','y']),
+        ]:
+            assert list(pkg_resources.yield_lines(inp)) == out
+
+    def testSplitting(self):
+        sample = """
+                    x
+                    [Y]
+                    z
+
+                    a
+                    [b ]
+                    # foo
+                    c
+                    [ d]
+                    [q]
+                    v
+                    """
+        assert (
+            list(pkg_resources.split_sections(sample))
+            ==
+            [
+                (None, ["x"]),
+                ("Y", ["z", "a"]),
+                ("b", ["c"]),
+                ("d", []),
+                ("q", ["v"]),
+            ]
+        )
+        with pytest.raises(ValueError):
+            list(pkg_resources.split_sections("[foo"))
+
+    def testSafeName(self):
+        assert safe_name("adns-python") == "adns-python"
+        assert safe_name("WSGI Utils") == "WSGI-Utils"
+        assert safe_name("WSGI  Utils") == "WSGI-Utils"
+        assert safe_name("Money$$$Maker") == "Money-Maker"
+        assert safe_name("peak.web") != "peak-web"
+
+    def testSafeVersion(self):
+        assert safe_version("1.2-1") == "1.2.post1"
+        assert safe_version("1.2 alpha") == "1.2.alpha"
+        assert safe_version("2.3.4 20050521") == "2.3.4.20050521"
+        assert safe_version("Money$$$Maker") == "Money-Maker"
+        assert safe_version("peak.web") == "peak.web"
+
+    def testSimpleRequirements(self):
+        assert (
+            list(parse_requirements('Twis-Ted>=1.2-1'))
+            ==
+            [Requirement('Twis-Ted',[('>=','1.2-1')], ())]
+        )
+        assert (
+            list(parse_requirements('Twisted >=1.2, \ # more\n<2.0'))
+            ==
+            [Requirement('Twisted',[('>=','1.2'),('<','2.0')], ())]
+        )
+        assert (
+            Requirement.parse("FooBar==1.99a3")
+            ==
+            Requirement("FooBar", [('==','1.99a3')], ())
+        )
+        with pytest.raises(ValueError):
+            Requirement.parse(">=2.3")
+        with pytest.raises(ValueError):
+            Requirement.parse("x\\")
+        with pytest.raises(ValueError):
+            Requirement.parse("x==2 q")
+        with pytest.raises(ValueError):
+            Requirement.parse("X==1\nY==2")
+        with pytest.raises(ValueError):
+            Requirement.parse("#")
+
+    def testVersionEquality(self):
+        def c(s1,s2):
+            p1, p2 = parse_version(s1),parse_version(s2)
+            assert p1 == p2, (s1,s2,p1,p2)
+
+        c('1.2-rc1', '1.2rc1')
+        c('0.4', '0.4.0')
+        c('0.4.0.0', '0.4.0')
+        c('0.4.0-0', '0.4-0')
+        c('0post1', '0.0post1')
+        c('0pre1', '0.0c1')
+        c('0.0.0preview1', '0c1')
+        c('0.0c1', '0-rc1')
+        c('1.2a1', '1.2.a.1')
+        c('1.2.a', '1.2a')
+
+    def testVersionOrdering(self):
+        def c(s1,s2):
+            p1, p2 = parse_version(s1),parse_version(s2)
+            assert p1<p2, (s1,s2,p1,p2)
+
+        c('2.1','2.1.1')
+        c('2a1','2b0')
+        c('2a1','2.1')
+        c('2.3a1', '2.3')
+        c('2.1-1', '2.1-2')
+        c('2.1-1', '2.1.1')
+        c('2.1', '2.1post4')
+        c('2.1a0-20040501', '2.1')
+        c('1.1', '02.1')
+        c('3.2', '3.2.post0')
+        c('3.2post1', '3.2post2')
+        c('0.4', '4.0')
+        c('0.0.4', '0.4.0')
+        c('0post1', '0.4post1')
+        c('2.1.0-rc1','2.1.0')
+        c('2.1dev','2.1a0')
+
+        torture ="""
+        0.80.1-3 0.80.1-2 0.80.1-1 0.79.9999+0.80.0pre4-1
+        0.79.9999+0.80.0pre2-3 0.79.9999+0.80.0pre2-2
+        0.77.2-1 0.77.1-1 0.77.0-1
+        """.split()
+
+        for p,v1 in enumerate(torture):
+            for v2 in torture[p+1:]:
+                c(v2,v1)
+
+    def testVersionBuildout(self):
+        """
+        Buildout has a function in its bootstrap.py that inspected the return
+        value of parse_version. 
+
+
+class TestParsing:
+
+    def testEmptyParse(self):
+        assert list(parse_requirements('')) == []
+
+    def testYielding(self):
+        for inp,out in [
+            ([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']),
+            (['x\n\n','y'], ['x','y']),
+        ]:
+            assert list(pkg_resources.yield_lines(inp)) == out
+
+    def testSplitting(self):
+        sample = """
+                    x
+                    [Y]
+                    z
+
+                    a
+                    [b ]
+                    # foo
+                    c
+                    [ d]
+                    [q]
+                    v
+                    """
+        assert (
+            list(pkg_resources.split_sections(sample))
+            ==
+            [
+                (None, ["x"]),
+                ("Y", ["z", "a"]),
+                ("b", ["c"]),
+                ("d", []),
+                ("q", ["v"]),
+            ]
+        )
+        with pytest.raises(ValueError):
+            list(pkg_resources.split_sections("[foo"))
+
+    def testSafeName(self):
+        assert safe_name("adns-python") == "adns-python"
+        assert safe_name("WSGI Utils") == "WSGI-Utils"
+        assert safe_name("WSGI  Utils") == "WSGI-Utils"
+        assert safe_name("Money$$$Maker") == "Money-Maker"
+        assert safe_name("peak.web") != "peak-web"
+
+    def testSafeVersion(self):
+        assert safe_version("1.2-1") == "1.2.post1"
+        assert safe_version("1.2 alpha") == "1.2.alpha"
+        assert safe_version("2.3.4 20050521") == "2.3.4.20050521"
+        assert safe_version("Money$$$Maker") == "Money-Maker"
+        assert safe_version("peak.web") == "peak.web"
+
+    def testSimpleRequirements(self):
+        assert (
+            list(parse_requirements('Twis-Ted>=1.2-1'))
+            ==
+            [Requirement('Twis-Ted',[('>=','1.2-1')], ())]
+        )
+        assert (
+            list(parse_requirements('Twisted >=1.2, \ # more\n<2.0'))
+            ==
+            [Requirement('Twisted',[('>=','1.2'),('<','2.0')], ())]
+        )
+        assert (
+            Requirement.parse("FooBar==1.99a3")
+            ==
+            Requirement("FooBar", [('==','1.99a3')], ())
+        )
+        with pytest.raises(ValueError):
+            Requirement.parse(">=2.3")
+        with pytest.raises(ValueError):
+            Requirement.parse("x\\")
+        with pytest.raises(ValueError):
+            Requirement.parse("x==2 q")
+        with pytest.raises(ValueError):
+            Requirement.parse("X==1\nY==2")
+        with pytest.raises(ValueError):
+            Requirement.parse("#")
+
+    def testVersionEquality(self):
+        def c(s1, s2):
+            p1, p2 = parse_version(s1), parse_version(s2)
+            assert p1 == p2, (s1, s2, p1, p2)
+
+        c('1.2-rc1', '1.2rc1')
+        c('0.4', '0.4.0')
+        c('0.4.0.0', '0.4.0')
+        c('0.4.0-0', '0.4-0')
+        c('0post1', '0.0post1')
+        c('0pre1', '0.0c1')
+        c('0.0.0preview1', '0c1')
+        c('0.0c1', '0-rc1')
+        c('1.2a1', '1.2.a.1')
+        c('1.2.a', '1.2a')
+
+    def testVersionOrdering(self):
+        def c(s1, s2):
+            p1, p2 = parse_version(s1), parse_version(s2)
+            assert p1 < p2, (s1, s2, p1, p2)
+
+        c('2.1','2.1.1')
+        c('2a1','2b0')
+        c('2a1','2.1')
+        c('2.3a1', '2.3')
+        c('2.1-1', '2.1-2')
+        c('2.1-1', '2.1.1')
+        c('2.1', '2.1post4')
+        c('2.1a0-20040501', '2.1')
+        c('1.1', '02.1')
+        c('3.2', '3.2.post0')
+        c('3.2post1', '3.2post2')
+        c('0.4', '4.0')
+        c('0.0.4', '0.4.0')
+        c('0post1', '0.4post1')
+        c('2.1.0-rc1','2.1.0')
+        c('2.1dev','2.1a0')
+
+        torture = """
+        0.80.1-3 0.80.1-2 0.80.1-1 0.79.9999+0.80.0pre4-1
+        0.79.9999+0.80.0pre2-3 0.79.9999+0.80.0pre2-2
+        0.77.2-1 0.77.1-1 0.77.0-1
+        """.split()
+
+        for p, v1 in enumerate(torture):
+            for v2 in torture[p+1:]:
+                c(v2, v1)
+
+    def testVersionBuildout(self):
+        """
+        Buildout has a function in its bootstrap.py that inspects the return
+        value of parse_version. The new parse_version returns a Version class
+        which needs to support this behavior, at least for now.
+        """
+        def buildout(parsed_version):
+            _final_parts = '*final-', '*final'
+
+            def _final_version(parsed_version):
+                for part in parsed_version:
+                    if (part[:1] == '*') and (part not in _final_parts):
+                        return False
+                return True
+            return _final_version(parsed_version)
+
+        assert buildout(parse_version("1.0"))
+        assert not buildout(parse_version("1.0a1"))
+
+    def testVersionIndexable(self):
+        """
+        Some projects were doing things like parse_version("v")[0], so we'll
+        support indexing the same as we support iterating.
+        """
+        assert parse_version("1.0")[0] == "00000001"
+
+    def testVersionTupleSort(self):
+        """
+        Some projects expected to be able to sort tuples against the return
+        value of parse_version. So again we'll add a warning-enabled shim to
+        make this possible.
+        """
+        assert parse_version("1.0") < tuple(parse_version("2.0"))
+        assert parse_version("1.0") <= tuple(parse_version("2.0"))
+        assert parse_version("1.0") == tuple(parse_version("1.0"))
+        assert parse_version("3.0") > tuple(parse_version("2.0"))
+        assert parse_version("3.0") >= tuple(parse_version("2.0"))
+        assert parse_version("3.0") != tuple(parse_version("2.0"))
+        assert not (parse_version("3.0") != tuple(parse_version("3.0")))
+
+    def testVersionHashable(self):
+        """
+        Ensure that our versions stay hashable even though we've subclassed
+        them and added some shim code to them.
+        """
+        assert (
+            hash(parse_version("1.0"))
+            ==
+            hash(parse_version("1.0"))
+        )
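The version tests above cover the switch to a PEP 440-based parse_version plus the temporary compatibility shims (iteration, indexing, tuple comparison) that this setuptools release keeps for old callers such as buildout. A short sketch of the core semantics; note the indexing shim asserted in testVersionIndexable is specific to this transitional release and emits deprecation warnings:

    from pkg_resources import parse_version

    # Normalization: equivalent legacy spellings compare equal.
    assert parse_version("1.2-rc1") == parse_version("1.2rc1")
    assert parse_version("0.4") == parse_version("0.4.0")

    # Ordering: pre-releases sort before the final release, post-releases after.
    assert parse_version("2.3a1") < parse_version("2.3") < parse_version("2.3post1")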
See #231") + def test_two_levels_deep(self): + """ + Test nested namespace packages + Create namespace packages in the following tree : + site-packages-1/pkg1/pkg2 + site-packages-2/pkg1/pkg2 + Check both are in the _namespace_packages dict and that their __path__ + is correct + """ + sys.path.append(os.path.join(self._tmpdir, "site-pkgs2")) + os.makedirs(os.path.join(self._tmpdir, "site-pkgs", "pkg1", "pkg2")) + os.makedirs(os.path.join(self._tmpdir, "site-pkgs2", "pkg1", "pkg2")) + ns_str = "__import__('pkg_resources').declare_namespace(__name__)\n" + for site in ["site-pkgs", "site-pkgs2"]: + pkg1_init = open(os.path.join(self._tmpdir, site, + "pkg1", "__init__.py"), "w") + pkg1_init.write(ns_str) + pkg1_init.close() + pkg2_init = open(os.path.join(self._tmpdir, site, + "pkg1", "pkg2", "__init__.py"), "w") + pkg2_init.write(ns_str) + pkg2_init.close() + import pkg1 + assert "pkg1" in pkg_resources._namespace_packages + # attempt to import pkg2 from site-pkgs2 + import pkg1.pkg2 + # check the _namespace_packages dict + assert "pkg1.pkg2" in pkg_resources._namespace_packages + assert pkg_resources._namespace_packages["pkg1"] == ["pkg1.pkg2"] + # check the __path__ attribute contains both paths + expected = [ + os.path.join(self._tmpdir, "site-pkgs", "pkg1", "pkg2"), + os.path.join(self._tmpdir, "site-pkgs2", "pkg1", "pkg2"), + ] + assert pkg1.pkg2.__path__ == expected diff --git a/awx/lib/site-packages/pytz/__init__.py b/awx/lib/site-packages/pytz/__init__.py index b836ae336c..e5cbe56d6b 100644 --- a/awx/lib/site-packages/pytz/__init__.py +++ b/awx/lib/site-packages/pytz/__init__.py @@ -9,8 +9,8 @@ on how to use these modules. ''' # The Olson database is updated several times a year. -OLSON_VERSION = '2014d' -VERSION = '2014.4' # Switching to pip compatible version numbering. +OLSON_VERSION = '2014j' +VERSION = '2014.10' # Switching to pip compatible version numbering. 
diff --git a/awx/lib/site-packages/pytz/__init__.py b/awx/lib/site-packages/pytz/__init__.py
index b836ae336c..e5cbe56d6b 100644
--- a/awx/lib/site-packages/pytz/__init__.py
+++ b/awx/lib/site-packages/pytz/__init__.py
@@ -9,8 +9,8 @@ on how to use these modules.
 '''
 
 # The Olson database is updated several times a year.
-OLSON_VERSION = '2014d'
-VERSION = '2014.4'  # Switching to pip compatible version numbering.
+OLSON_VERSION = '2014j'
+VERSION = '2014.10'  # Switching to pip compatible version numbering.
 __version__ = VERSION
 
 OLSEN_VERSION = OLSON_VERSION  # Old releases had this misspelling
@@ -735,6 +735,7 @@ all_timezones = \
  'Asia/Bishkek',
  'Asia/Brunei',
  'Asia/Calcutta',
+ 'Asia/Chita',
  'Asia/Choibalsan',
  'Asia/Chongqing',
  'Asia/Chungking',
@@ -792,6 +793,7 @@ all_timezones = \
  'Asia/Seoul',
  'Asia/Shanghai',
  'Asia/Singapore',
+ 'Asia/Srednekolymsk',
  'Asia/Taipei',
  'Asia/Tashkent',
  'Asia/Tbilisi',
@@ -1002,6 +1004,7 @@ all_timezones = \
  'PST8PDT',
  'Pacific/Apia',
  'Pacific/Auckland',
+ 'Pacific/Bougainville',
  'Pacific/Chatham',
  'Pacific/Chuuk',
  'Pacific/Easter',
@@ -1297,8 +1300,8 @@ common_timezones = \
  'Asia/Beirut',
  'Asia/Bishkek',
  'Asia/Brunei',
+ 'Asia/Chita',
  'Asia/Choibalsan',
- 'Asia/Chongqing',
  'Asia/Colombo',
  'Asia/Damascus',
  'Asia/Dhaka',
@@ -1306,7 +1309,6 @@ common_timezones = \
  'Asia/Dubai',
  'Asia/Dushanbe',
  'Asia/Gaza',
- 'Asia/Harbin',
  'Asia/Hebron',
  'Asia/Ho_Chi_Minh',
  'Asia/Hong_Kong',
@@ -1318,7 +1320,6 @@ common_timezones = \
  'Asia/Kabul',
  'Asia/Kamchatka',
  'Asia/Karachi',
- 'Asia/Kashgar',
  'Asia/Kathmandu',
  'Asia/Khandyga',
  'Asia/Kolkata',
@@ -1348,6 +1349,7 @@ common_timezones = \
  'Asia/Seoul',
  'Asia/Shanghai',
  'Asia/Singapore',
+ 'Asia/Srednekolymsk',
  'Asia/Taipei',
  'Asia/Tashkent',
  'Asia/Tbilisi',
@@ -1460,6 +1462,7 @@ common_timezones = \
  'Indian/Reunion',
  'Pacific/Apia',
  'Pacific/Auckland',
+ 'Pacific/Bougainville',
  'Pacific/Chatham',
  'Pacific/Chuuk',
  'Pacific/Easter',
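This pytz bump (2014.4 to 2014.10, Olson 2014d to 2014j) adds three zones and drops China's legacy multi-zone aliases from the "common" list while keeping them importable. A quick check against the upgraded package:

    import pytz

    print(pytz.OLSON_VERSION)                            # '2014j'
    print('Asia/Chita' in pytz.common_timezones)         # True (new zone)
    print('Pacific/Bougainville' in pytz.all_timezones)  # True (new zone)

    # The old per-province China zones stay importable but are no longer
    # "common"; upstream folded them into Asia/Shanghai and Asia/Urumqi.
    print('Asia/Chongqing' in pytz.all_timezones)        # True
    print('Asia/Chongqing' in pytz.common_timezones)     # False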
diff --git a/awx/lib/site-packages/pytz/tzinfo.py b/awx/lib/site-packages/pytz/tzinfo.py
index d53e9ff172..1318872df2 100644
--- a/awx/lib/site-packages/pytz/tzinfo.py
+++ b/awx/lib/site-packages/pytz/tzinfo.py
@@ -142,7 +142,7 @@ class StaticTzInfo(BaseTzInfo):
 
     def __reduce__(self):
         # Special pickle to zone remains a singleton and to cope with
-        # database changes. 
+        # database changes.
         return pytz._p, (self.zone,)
 
@@ -369,13 +369,15 @@ class DstTzInfo(BaseTzInfo):
         # hints to be passed in (such as the UTC offset or abbreviation),
         # but that is just getting silly.
         #
-        # Choose the earliest (by UTC) applicable timezone.
-        sorting_keys = {}
+        # Choose the earliest (by UTC) applicable timezone if is_dst=True
+        # Choose the latest (by UTC) applicable timezone if is_dst=False
+        # i.e., behave like end-of-DST transition
+        dates = {}  # utc -> local
         for local_dt in filtered_possible_loc_dt:
-            key = local_dt.replace(tzinfo=None) - local_dt.tzinfo._utcoffset
-            sorting_keys[key] = local_dt
-        first_key = sorted(sorting_keys)[0]
-        return sorting_keys[first_key]
+            utc_time = local_dt.replace(tzinfo=None) - local_dt.tzinfo._utcoffset
+            assert utc_time not in dates
+            dates[utc_time] = local_dt
+        return dates[[min, max][not is_dst](dates)]
 
     def utcoffset(self, dt, is_dst=None):
         '''See datetime.tzinfo.utcoffset
@@ -560,4 +562,3 @@ def unpickler(zone, utcoffset=None, dstoffset=None, tzname=None):
     inf = (utcoffset, dstoffset, tzname)
     tz._tzinfos[inf] = tz.__class__(inf, tz._tzinfos)
     return tz._tzinfos[inf]
-
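The localize() change above makes is_dst directional for ambiguous wall-clock times: is_dst=True selects the earlier applicable UTC instant, is_dst=False the later one (the end-of-DST reading). A minimal demonstration using a US fall-back transition:

    from datetime import datetime
    import pytz

    eastern = pytz.timezone('US/Eastern')
    ambiguous = datetime(2014, 11, 2, 1, 30)  # occurs twice on fall-back day

    print(eastern.localize(ambiguous, is_dst=True))   # 01:30 EDT (UTC-4, earlier)
    print(eastern.localize(ambiguous, is_dst=False))  # 01:30 EST (UTC-5, later)

    # is_dst=None refuses to guess and raises instead.
    try:
        eastern.localize(ambiguous, is_dst=None)
    except pytz.AmbiguousTimeError:
        print('ambiguous wall time')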
[Roughly ninety binary files under awx/lib/site-packages/pytz/zoneinfo/ change in this diff: the compiled tz data for Africa/*, America/* (Grand_Turk, Jamaica, Metlakatla), Antarctica/* (Casey, Macquarie), Asia/*, Atlantic/*, Australia/*, Europe/*, Indian/*, Pacific/*, US/Samoa and the legacy aliases (Egypt, Jamaica, Japan, NZ-CHAT, PRC, Portugal, ROC, ROK, W-SU) are rebuilt for tzdata 2014j. Three zone files are new: Asia/Chita, Asia/Srednekolymsk and Pacific/Bougainville. The individual "Binary files ... differ" stanzas are omitted here.]
diff --git a/awx/lib/site-packages/pytz/zoneinfo/iso3166.tab b/awx/lib/site-packages/pytz/zoneinfo/iso3166.tab
index a1e4b42e44..0b0b8426d4 100644
--- a/awx/lib/site-packages/pytz/zoneinfo/iso3166.tab
+++ b/awx/lib/site-packages/pytz/zoneinfo/iso3166.tab
@@ -3,21 +3,21 @@
 # This file is in the public domain, so clarified as of
 # 2009-05-17 by Arthur David Olson.
 #
-# From Paul Eggert (2013-05-27):
+# From Paul Eggert (2014-07-18):
+# This file contains a table of two-letter country codes.  Columns are
+# separated by a single tab.  Lines beginning with '#' are comments.
+# Although all text currently uses ASCII encoding, this is planned to
+# change to UTF-8 soon.  The columns of the table are as follows:
 #
-# This file contains a table with the following columns:
 # 1.  ISO 3166-1 alpha-2 country code, current as of
-#     ISO 3166-1 Newsletter VI-15 (2013-05-10).  See: Updates on ISO 3166
+#     ISO 3166-1 Newsletter VI-16 (2013-07-11).  See: Updates on ISO 3166
 #     http://www.iso.org/iso/home/standards/country_codes/updates_on_iso_3166.htm
 # 2.  The usual English name for the coded region,
 #     chosen so that alphabetic sorting of subsets produces helpful lists.
 #     This is not the same as the English name in the ISO 3166 tables.
 #
-# Columns are separated by a single tab.
 # The table is sorted by country code.
 #
-# Lines beginning with `#' are comments.
-#
 # This table is intended as an aid for users, to help them select time
 # zone data appropriate for their practical needs.  It is not intended
 # to take or endorse any position on legal or territorial claims.
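iso3166.tab and the zone tables that follow are what back pytz's country-lookup helpers, so while the header rewrite here is cosmetic, the data feeds real API. For example:

    import pytz

    print(pytz.country_names['ru'])          # 'Russia'
    print(pytz.country_timezones('ru')[:2])  # e.g. ['Europe/Kaliningrad', 'Europe/Moscow']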
diff --git a/awx/lib/site-packages/pytz/zoneinfo/zone.tab b/awx/lib/site-packages/pytz/zoneinfo/zone.tab
index 923d6ac5be..a7373f177d 100644
--- a/awx/lib/site-packages/pytz/zoneinfo/zone.tab
+++ b/awx/lib/site-packages/pytz/zoneinfo/zone.tab
@@ -1,36 +1,24 @@
-# TZ zone descriptions
+# tz zone descriptions (deprecated version)
 #
 # This file is in the public domain, so clarified as of
 # 2009-05-17 by Arthur David Olson.
 #
-# From Paul Eggert (2013-08-14):
+# From Paul Eggert (2014-07-31):
+# This file is intended as a backward-compatibility aid for older programs.
+# New programs should use zone1970.tab.  This file is like zone1970.tab (see
+# zone1970.tab's comments), but with the following additional restrictions:
 #
-# This file contains a table where each row stands for an area that is
-# the intersection of a region identified by a country code and of a
-# zone where civil clocks have agreed since 1970.  The columns of the
-# table are as follows:
+# 1.  This file contains only ASCII characters.
+# 2.  The first data column contains exactly one country code.
 #
-# 1.  ISO 3166 2-character country code.  See the file 'iso3166.tab'.
-# 2.  Latitude and longitude of the area's principal location
-#     in ISO 6709 sign-degrees-minutes-seconds format,
-#     either +-DDMM+-DDDMM or +-DDMMSS+-DDDMMSS,
-#     first latitude (+ is north), then longitude (+ is east).
-# 3.  Zone name used in value of TZ environment variable.
-#     Please see the 'Theory' file for how zone names are chosen.
-#     If multiple zones overlap a country, each has a row in the
-#     table, with column 1 being duplicated.
-# 4.  Comments; present if and only if the country has multiple rows.
-#
-# Columns are separated by a single tab.
-# The table is sorted first by country, then an order within the country that
-# (1) makes some geographical sense, and
-# (2) puts the most populous areas first, where that does not contradict (1).
-#
-# Lines beginning with '#' are comments.
+# Because of (2), each row stands for an area that is the intersection
+# of a region identified by a country code and of a zone where civil
+# clocks have agreed since 1970; this is a narrower definition than
+# that of zone1970.tab.
 #
 # This table is intended as an aid for users, to help them select time
-# zone data appropriate for their practical needs.  It is not intended
-# to take or endorse any position on legal or territorial claims.
+# zone data entries appropriate for their practical needs.  It is not
+# intended to take or endorse any position on legal or territorial claims.
 #
 #country-
 #code	coordinates	TZ	comments
@@ -49,7 +37,7 @@ AQ	-6736+06253	Antarctica/Mawson	Mawson Station, Holme Bay
 AQ	-6835+07758	Antarctica/Davis	Davis Station, Vestfold Hills
 AQ	-6617+11031	Antarctica/Casey	Casey Station, Bailey Peninsula
 AQ	-7824+10654	Antarctica/Vostok	Vostok Station, Lake Vostok
-AQ	-6640+14001	Antarctica/DumontDUrville	Dumont-d'Urville Station, Terre Adelie
+AQ	-6640+14001	Antarctica/DumontDUrville	Dumont-d'Urville Station, Adelie Land
 AQ	-690022+0393524	Antarctica/Syowa	Syowa Station, E Ongul I
 AQ	-720041+0023206	Antarctica/Troll	Troll Station, Queen Maud Land
 AR	-3436-05827	America/Argentina/Buenos_Aires	Buenos Aires (BA, CF)
@@ -128,7 +116,7 @@ CA	+4901-08816	America/Nipigon	Eastern Time - Ontario & Quebec - places that did
 CA	+4823-08915	America/Thunder_Bay	Eastern Time - Thunder Bay, Ontario
 CA	+6344-06828	America/Iqaluit	Eastern Time - east Nunavut - most locations
 CA	+6608-06544	America/Pangnirtung	Eastern Time - Pangnirtung, Nunavut
-CA	+744144-0944945	America/Resolute	Central Standard Time - Resolute, Nunavut
+CA	+744144-0944945	America/Resolute	Central Time - Resolute, Nunavut
 CA	+484531-0913718	America/Atikokan	Eastern Standard Time - Atikokan, Ontario and Southampton I, Nunavut
 CA	+624900-0920459	America/Rankin_Inlet	Central Time - central Nunavut
 CA	+4953-09709	America/Winnipeg	Central Time - Manitoba & west Ontario
@@ -153,13 +141,10 @@ CH	+4723+00832	Europe/Zurich
 CI	+0519-00402	Africa/Abidjan
 CK	-2114-15946	Pacific/Rarotonga
 CL	-3327-07040	America/Santiago	most locations
-CL	-2709-10926	Pacific/Easter	Easter Island & Sala y Gomez
+CL	-2709-10926	Pacific/Easter	Easter Island
 CM	+0403+00942	Africa/Douala
-CN	+3114+12128	Asia/Shanghai	east China - Beijing, Guangdong, Shanghai, etc.
-CN	+4545+12641	Asia/Harbin	Heilongjiang (except Mohe), Jilin
-CN	+2934+10635	Asia/Chongqing	central China - Sichuan, Yunnan, Guangxi, Shaanxi, Guizhou, etc.
-CN	+4348+08735	Asia/Urumqi	most of Tibet & Xinjiang
-CN	+3929+07559	Asia/Kashgar	west Tibet & Xinjiang
+CN	+3114+12128	Asia/Shanghai	Beijing Time
+CN	+4348+08735	Asia/Urumqi	Xinjiang Time
 CO	+0436-07405	America/Bogota
 CR	+0956-08405	America/Costa_Rica
 CU	+2308-08222	America/Havana
@@ -322,7 +307,8 @@ PE	-1203-07703	America/Lima
 PF	-1732-14934	Pacific/Tahiti	Society Islands
 PF	-0900-13930	Pacific/Marquesas	Marquesas Islands
 PF	-2308-13457	Pacific/Gambier	Gambier Islands
-PG	-0930+14710	Pacific/Port_Moresby
+PG	-0930+14710	Pacific/Port_Moresby	most locations
+PG	-0613+15534	Pacific/Bougainville	Bougainville
 PH	+1435+12100	Asia/Manila
 PK	+2452+06703	Asia/Karachi
 PL	+5215+02100	Europe/Warsaw
@@ -341,24 +327,26 @@ RE	-2052+05528	Indian/Reunion
 RO	+4426+02606	Europe/Bucharest
 RS	+4450+02030	Europe/Belgrade
 RU	+5443+02030	Europe/Kaliningrad	Moscow-01 - Kaliningrad
-RU	+5545+03735	Europe/Moscow	Moscow+00 - west Russia
-RU	+4844+04425	Europe/Volgograd	Moscow+00 - Caspian Sea
-RU	+5312+05009	Europe/Samara	Moscow+00 - Samara, Udmurtia
+RU	+554521+0373704	Europe/Moscow	Moscow+00 - west Russia
 RU	+4457+03406	Europe/Simferopol	Moscow+00 - Crimea
+RU	+4844+04425	Europe/Volgograd	Moscow+00 - Caspian Sea
+RU	+5312+05009	Europe/Samara	Moscow+00 (Moscow+01 after 2014-10-26) - Samara, Udmurtia
 RU	+5651+06036	Asia/Yekaterinburg	Moscow+02 - Urals
 RU	+5500+07324	Asia/Omsk	Moscow+03 - west Siberia
 RU	+5502+08255	Asia/Novosibirsk	Moscow+03 - Novosibirsk
-RU	+5345+08707	Asia/Novokuznetsk	Moscow+03 - Novokuznetsk
+RU	+5345+08707	Asia/Novokuznetsk	Moscow+03 (Moscow+04 after 2014-10-26) - Kemerovo
 RU	+5601+09250	Asia/Krasnoyarsk	Moscow+04 - Yenisei River
 RU	+5216+10420	Asia/Irkutsk	Moscow+05 - Lake Baikal
+RU	+5203+11328	Asia/Chita	Moscow+06 (Moscow+05 after 2014-10-26) - Zabaykalsky
 RU	+6200+12940	Asia/Yakutsk	Moscow+06 - Lena River
 RU	+623923+1353314	Asia/Khandyga	Moscow+06 - Tomponsky, Ust-Maysky
 RU	+4310+13156	Asia/Vladivostok	Moscow+07 - Amur River
 RU	+4658+14242	Asia/Sakhalin	Moscow+07 - Sakhalin Island
 RU	+643337+1431336	Asia/Ust-Nera	Moscow+07 - Oymyakonsky
-RU	+5934+15048	Asia/Magadan	Moscow+08 - Magadan
-RU	+5301+15839	Asia/Kamchatka	Moscow+08 - Kamchatka
-RU	+6445+17729	Asia/Anadyr	Moscow+08 - Bering Sea
+RU	+5934+15048	Asia/Magadan	Moscow+08 (Moscow+07 after 2014-10-26) - Magadan
+RU	+6728+15343	Asia/Srednekolymsk	Moscow+08 - E Sakha, N Kuril Is
+RU	+5301+15839	Asia/Kamchatka	Moscow+08 (Moscow+09 after 2014-10-26) - Kamchatka
+RU	+6445+17729	Asia/Anadyr	Moscow+08 (Moscow+09 after 2014-10-26) - Bering Sea
 RW	-0157+03004	Africa/Kigali
 SA	+2438+04643	Asia/Riyadh
 SB	-0932+16012	Pacific/Guadalcanal
@@ -425,13 +413,13 @@ US	+394421-1045903	America/Denver	Mountain Time
 US	+433649-1161209	America/Boise	Mountain Time - south Idaho & east Oregon
 US	+332654-1120424	America/Phoenix	Mountain Standard Time - Arizona (except Navajo)
 US	+340308-1181434	America/Los_Angeles	Pacific Time
+US	+550737-1313435	America/Metlakatla	Pacific Standard Time - Annette Island, Alaska
 US	+611305-1495401	America/Anchorage	Alaska Time
 US	+581807-1342511	America/Juneau	Alaska Time - Alaska panhandle
 US	+571035-1351807	America/Sitka	Alaska Time - southeast Alaska panhandle
 US	+593249-1394338	America/Yakutat	Alaska Time - Alaska panhandle neck
 US	+643004-1652423	America/Nome	Alaska Time - west Alaska
 US	+515248-1763929	America/Adak	Aleutian Islands
-US	+550737-1313435	America/Metlakatla	Metlakatla Time - Annette Island
 US	+211825-1575130	Pacific/Honolulu	Hawaii
 UY	-3453-05611	America/Montevideo
 UZ	+3940+06648	Asia/Samarkand	west Uzbekistan
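The "(Moscow+NN after 2014-10-26)" annotations above record Russia's zone reshuffle of 2014-10-26, which is also why Asia/Chita and Asia/Srednekolymsk exist as new zones. With the rebuilt zoneinfo, the offset change is visible directly:

    from datetime import datetime
    import pytz

    chita = pytz.timezone('Asia/Chita')  # new in tzdata 2014j
    before = chita.localize(datetime(2014, 10, 1, 12, 0))
    after = chita.localize(datetime(2014, 11, 1, 12, 0))
    print(before.utcoffset())  # 10:00:00 -- UTC+10 before the change
    print(after.utcoffset())   # 8:00:00  -- UTC+8 afterwards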
diff --git a/awx/lib/site-packages/pytz/zoneinfo/zone1970.tab b/awx/lib/site-packages/pytz/zoneinfo/zone1970.tab new file mode 100644 index 0000000000..e971bc7f5a --- /dev/null +++ b/awx/lib/site-packages/pytz/zoneinfo/zone1970.tab @@ -0,0 +1,371 @@ +# tz zone descriptions +# +# This file is in the public domain. +# +# From Paul Eggert (2014-07-31): +# This file contains a table where each row stands for a zone where +# civil time stamps have agreed since 1970. Columns are separated by +# a single tab. Lines beginning with '#' are comments. All text uses +# UTF-8 encoding. The columns of the table are as follows: +# +# 1. The countries that overlap the zone, as a comma-separated list +# of ISO 3166 2-character country codes. See the file 'iso3166.tab'. +# 2. Latitude and longitude of the zone's principal location +# in ISO 6709 sign-degrees-minutes-seconds format, +# either +-DDMM+-DDDMM or +-DDMMSS+-DDDMMSS, +# first latitude (+ is north), then longitude (+ is east). +# 3. Zone name used in value of TZ environment variable. +# Please see the 'Theory' file for how zone names are chosen. +# If multiple zones overlap a country, each has a row in the +# table, with each column 1 containing the country code. +# 4. Comments; present if and only if a country has multiple zones. +# +# If a zone covers multiple countries, the most-populous city is used, +# and that country is listed first in column 1; any other countries +# are listed alphabetically by country code. The table is sorted +# first by country code, then (if possible) by an order within the +# country that (1) makes some geographical sense, and (2) puts the +# most populous zones first, where that does not contradict (1). +# +# This table is intended as an aid for users, to help them select time +# zone data entries appropriate for their practical needs. It is not +# intended to take or endorse any position on legal or territorial claims. 
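zone1970.tab is tab-separated, with comma-separated country codes in the first column and an optional fourth (comment) column. A hedged parsing sketch; the helper name and path are hypothetical (pytz ships this file inside its package):

    def read_zone1970(path):
        """Yield (country_codes, coordinates, tz_name, comment) tuples."""
        with open(path, encoding='utf-8') as fh:
            for line in fh:
                if line.startswith('#') or not line.strip():
                    continue  # skip comments and blank lines
                fields = line.rstrip('\n').split('\t')
                codes = fields[0].split(',')  # e.g. ['AE', 'OM'] for Asia/Dubai
                comment = fields[3] if len(fields) > 3 else ''
                yield codes, fields[1], fields[2], comment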
+#
+#country-
+#codes	coordinates	TZ	comments
+AD	+4230+00131	Europe/Andorra
+AE,OM	+2518+05518	Asia/Dubai
+AF	+3431+06912	Asia/Kabul
+AL	+4120+01950	Europe/Tirane
+AM	+4011+04430	Asia/Yerevan
+AQ	-6734-06808	Antarctica/Rothera	Rothera Station, Adelaide Island
+AQ	-6448-06406	Antarctica/Palmer	Palmer Station, Anvers Island
+AQ	-6736+06253	Antarctica/Mawson	Mawson Station, Holme Bay
+AQ	-6835+07758	Antarctica/Davis	Davis Station, Vestfold Hills
+AQ	-6617+11031	Antarctica/Casey	Casey Station, Bailey Peninsula
+AQ	-7824+10654	Antarctica/Vostok	Vostok Station, Lake Vostok
+AQ	-6640+14001	Antarctica/DumontDUrville	Dumont-d'Urville Station, Adélie Land
+AQ	-690022+0393524	Antarctica/Syowa	Syowa Station, E Ongul I
+AQ	-720041+0023206	Antarctica/Troll	Troll Station, Queen Maud Land
+AR	-3436-05827	America/Argentina/Buenos_Aires	Buenos Aires (BA, CF)
+AR	-3124-06411	America/Argentina/Cordoba	most locations (CB, CC, CN, ER, FM, MN, SE, SF)
+AR	-2447-06525	America/Argentina/Salta	(SA, LP, NQ, RN)
+AR	-2411-06518	America/Argentina/Jujuy	Jujuy (JY)
+AR	-2649-06513	America/Argentina/Tucuman	Tucumán (TM)
+AR	-2828-06547	America/Argentina/Catamarca	Catamarca (CT), Chubut (CH)
+AR	-2926-06651	America/Argentina/La_Rioja	La Rioja (LR)
+AR	-3132-06831	America/Argentina/San_Juan	San Juan (SJ)
+AR	-3253-06849	America/Argentina/Mendoza	Mendoza (MZ)
+AR	-3319-06621	America/Argentina/San_Luis	San Luis (SL)
+AR	-5138-06913	America/Argentina/Rio_Gallegos	Santa Cruz (SC)
+AR	-5448-06818	America/Argentina/Ushuaia	Tierra del Fuego (TF)
+AS,UM	-1416-17042	Pacific/Pago_Pago	Samoa, Midway
+AT	+4813+01620	Europe/Vienna
+AU	-3133+15905	Australia/Lord_Howe	Lord Howe Island
+AU	-5430+15857	Antarctica/Macquarie	Macquarie Island
+AU	-4253+14719	Australia/Hobart	Tasmania - most locations
+AU	-3956+14352	Australia/Currie	Tasmania - King Island
+AU	-3749+14458	Australia/Melbourne	Victoria
+AU	-3352+15113	Australia/Sydney	New South Wales - most locations
+AU	-3157+14127	Australia/Broken_Hill	New South Wales - Yancowinna
+AU	-2728+15302	Australia/Brisbane	Queensland - most locations
+AU	-2016+14900	Australia/Lindeman	Queensland - Holiday Islands
+AU	-3455+13835	Australia/Adelaide	South Australia
+AU	-1228+13050	Australia/Darwin	Northern Territory
+AU	-3157+11551	Australia/Perth	Western Australia - most locations
+AU	-3143+12852	Australia/Eucla	Western Australia - Eucla area
+AZ	+4023+04951	Asia/Baku
+BB	+1306-05937	America/Barbados
+BD	+2343+09025	Asia/Dhaka
+BE	+5050+00420	Europe/Brussels
+BG	+4241+02319	Europe/Sofia
+BM	+3217-06446	Atlantic/Bermuda
+BN	+0456+11455	Asia/Brunei
+BO	-1630-06809	America/La_Paz
+BR	-0351-03225	America/Noronha	Atlantic islands
+BR	-0127-04829	America/Belem	Amapá, E Pará
+BR	-0343-03830	America/Fortaleza	NE Brazil (MA, PI, CE, RN, PB)
+BR	-0803-03454	America/Recife	Pernambuco
+BR	-0712-04812	America/Araguaina	Tocantins
+BR	-0940-03543	America/Maceio	Alagoas, Sergipe
+BR	-1259-03831	America/Bahia	Bahia
+BR	-2332-04637	America/Sao_Paulo	S & SE Brazil (GO, DF, MG, ES, RJ, SP, PR, SC, RS)
+BR	-2027-05437	America/Campo_Grande	Mato Grosso do Sul
+BR	-1535-05605	America/Cuiaba	Mato Grosso
+BR	-0226-05452	America/Santarem	W Pará
+BR	-0846-06354	America/Porto_Velho	Rondônia
+BR	+0249-06040	America/Boa_Vista	Roraima
+BR	-0308-06001	America/Manaus	E Amazonas
+BR	-0640-06952	America/Eirunepe	W Amazonas
+BR	-0958-06748	America/Rio_Branco	Acre
+BS	+2505-07721	America/Nassau
+BT	+2728+08939	Asia/Thimphu
+BY	+5354+02734	Europe/Minsk
+BZ	+1730-08812	America/Belize
+CA	+4734-05243	America/St_Johns	Newfoundland Time, including SE Labrador
+CA	+4439-06336	America/Halifax	Atlantic Time - Nova Scotia (most places), PEI
+CA	+4612-05957	America/Glace_Bay	Atlantic Time - Nova Scotia - places that did not observe DST 1966-1971
+CA	+4606-06447	America/Moncton	Atlantic Time - New Brunswick
+CA	+5320-06025	America/Goose_Bay	Atlantic Time - Labrador - most locations
+CA	+5125-05707	America/Blanc-Sablon	Atlantic Standard Time - Quebec - Lower North Shore
+CA	+4339-07923	America/Toronto	Eastern Time - Ontario & Quebec - most locations
+CA	+4901-08816	America/Nipigon	Eastern Time - Ontario & Quebec - places that did not observe DST 1967-1973
+CA	+4823-08915	America/Thunder_Bay	Eastern Time - Thunder Bay, Ontario
+CA	+6344-06828	America/Iqaluit	Eastern Time - east Nunavut - most locations
+CA	+6608-06544	America/Pangnirtung	Eastern Time - Pangnirtung, Nunavut
+CA	+744144-0944945	America/Resolute	Central Time - Resolute, Nunavut
+CA	+484531-0913718	America/Atikokan	Eastern Standard Time - Atikokan, Ontario and Southampton I, Nunavut
+CA	+624900-0920459	America/Rankin_Inlet	Central Time - central Nunavut
+CA	+4953-09709	America/Winnipeg	Central Time - Manitoba & west Ontario
+CA	+4843-09434	America/Rainy_River	Central Time - Rainy River & Fort Frances, Ontario
+CA	+5024-10439	America/Regina	Central Standard Time - Saskatchewan - most locations
+CA	+5017-10750	America/Swift_Current	Central Standard Time - Saskatchewan - midwest
+CA	+5333-11328	America/Edmonton	Mountain Time - Alberta, east British Columbia & west Saskatchewan
+CA	+690650-1050310	America/Cambridge_Bay	Mountain Time - west Nunavut
+CA	+6227-11421	America/Yellowknife	Mountain Time - central Northwest Territories
+CA	+682059-1334300	America/Inuvik	Mountain Time - west Northwest Territories
+CA	+4906-11631	America/Creston	Mountain Standard Time - Creston, British Columbia
+CA	+5946-12014	America/Dawson_Creek	Mountain Standard Time - Dawson Creek & Fort Saint John, British Columbia
+CA	+4916-12307	America/Vancouver	Pacific Time - west British Columbia
+CA	+6043-13503	America/Whitehorse	Pacific Time - south Yukon
+CA	+6404-13925	America/Dawson	Pacific Time - north Yukon
+CC	-1210+09655	Indian/Cocos
+CH,DE,LI	+4723+00832	Europe/Zurich	Swiss time
+CI,BF,GM,GN,ML,MR,SH,SL,SN,ST,TG	+0519-00402	Africa/Abidjan
+CK	-2114-15946	Pacific/Rarotonga
+CL	-3327-07040	America/Santiago	most locations
+CL	-2709-10926	Pacific/Easter	Easter Island
+CN	+3114+12128	Asia/Shanghai	Beijing Time
+CN	+4348+08735	Asia/Urumqi	Xinjiang Time
+CO	+0436-07405	America/Bogota
+CR	+0956-08405	America/Costa_Rica
+CU	+2308-08222	America/Havana
+CV	+1455-02331	Atlantic/Cape_Verde
+CW,AW,BQ,SX	+1211-06900	America/Curacao
+CX	-1025+10543	Indian/Christmas
+CY	+3510+03322	Asia/Nicosia
+CZ,SK	+5005+01426	Europe/Prague
+DE	+5230+01322	Europe/Berlin	Berlin time
+DK	+5540+01235	Europe/Copenhagen
+DO	+1828-06954	America/Santo_Domingo
+DZ	+3647+00303	Africa/Algiers
+EC	-0210-07950	America/Guayaquil	mainland
+EC	-0054-08936	Pacific/Galapagos	Galápagos Islands
+EE	+5925+02445	Europe/Tallinn
+EG	+3003+03115	Africa/Cairo
+EH	+2709-01312	Africa/El_Aaiun
+ES	+4024-00341	Europe/Madrid	mainland
+ES	+3553-00519	Africa/Ceuta	Ceuta & Melilla
+ES	+2806-01524	Atlantic/Canary	Canary Islands
+FI,AX	+6010+02458	Europe/Helsinki
+FJ	-1808+17825	Pacific/Fiji
+FK	-5142-05751	Atlantic/Stanley
+FM	+0725+15147	Pacific/Chuuk	Chuuk (Truk) and Yap
+FM	+0658+15813	Pacific/Pohnpei	Pohnpei (Ponape)
+FM	+0519+16259	Pacific/Kosrae	Kosrae
+FO	+6201-00646	Atlantic/Faroe
+FR	+4852+00220	Europe/Paris
+GB,GG,IM,JE	+513030-0000731	Europe/London
+GE	+4143+04449	Asia/Tbilisi
+GF	+0456-05220	America/Cayenne
+GH	+0533-00013	Africa/Accra
+GI	+3608-00521	Europe/Gibraltar
+GL	+6411-05144	America/Godthab	most locations
+GL	+7646-01840	America/Danmarkshavn	east coast, north of Scoresbysund
+GL	+7029-02158	America/Scoresbysund	Scoresbysund / Ittoqqortoormiit
+GL	+7634-06847	America/Thule	Thule / Pituffik
+GR	+3758+02343	Europe/Athens
+GS	-5416-03632	Atlantic/South_Georgia
+GT	+1438-09031	America/Guatemala
+GU,MP	+1328+14445	Pacific/Guam
+GW	+1151-01535	Africa/Bissau
+GY	+0648-05810	America/Guyana
+HK	+2217+11409	Asia/Hong_Kong
+HN	+1406-08713	America/Tegucigalpa
+HT	+1832-07220	America/Port-au-Prince
+HU	+4730+01905	Europe/Budapest
+ID	-0610+10648	Asia/Jakarta	Java & Sumatra
+ID	-0002+10920	Asia/Pontianak	west & central Borneo
+ID	-0507+11924	Asia/Makassar	east & south Borneo, Sulawesi (Celebes), Bali, Nusa Tengarra, west Timor
+ID	-0232+14042	Asia/Jayapura	west New Guinea (Irian Jaya) & Malukus (Moluccas)
+IE	+5320-00615	Europe/Dublin
+IL	+314650+0351326	Asia/Jerusalem
+IN	+2232+08822	Asia/Kolkata
+IO	-0720+07225	Indian/Chagos
+IQ	+3321+04425	Asia/Baghdad
+IR	+3540+05126	Asia/Tehran
+IS	+6409-02151	Atlantic/Reykjavik
+IT,SM,VA	+4154+01229	Europe/Rome
+JM	+175805-0764736	America/Jamaica
+JO	+3157+03556	Asia/Amman
+JP	+353916+1394441	Asia/Tokyo
+KE,DJ,ER,ET,KM,MG,SO,TZ,UG,YT	-0117+03649	Africa/Nairobi
+KG	+4254+07436	Asia/Bishkek
+KI	+0125+17300	Pacific/Tarawa	Gilbert Islands
+KI	-0308-17105	Pacific/Enderbury	Phoenix Islands
+KI	+0152-15720	Pacific/Kiritimati	Line Islands
+KP	+3901+12545	Asia/Pyongyang
+KR	+3733+12658	Asia/Seoul
+KZ	+4315+07657	Asia/Almaty	most locations
+KZ	+4448+06528	Asia/Qyzylorda	Qyzylorda (Kyzylorda, Kzyl-Orda)
+KZ	+5017+05710	Asia/Aqtobe	Aqtobe (Aktobe)
+KZ	+4431+05016	Asia/Aqtau	Atyrau (Atirau, Gur'yev), Mangghystau (Mankistau)
+KZ	+5113+05121	Asia/Oral	West Kazakhstan
+LB	+3353+03530	Asia/Beirut
+LK	+0656+07951	Asia/Colombo
+LR	+0618-01047	Africa/Monrovia
+LT	+5441+02519	Europe/Vilnius
+LU	+4936+00609	Europe/Luxembourg
+LV	+5657+02406	Europe/Riga
+LY	+3254+01311	Africa/Tripoli
+MA	+3339-00735	Africa/Casablanca
+MC	+4342+00723	Europe/Monaco
+MD	+4700+02850	Europe/Chisinau
+MH	+0709+17112	Pacific/Majuro	most locations
+MH	+0905+16720	Pacific/Kwajalein	Kwajalein
+MM	+1647+09610	Asia/Rangoon
+MN	+4755+10653	Asia/Ulaanbaatar	most locations
+MN	+4801+09139	Asia/Hovd	Bayan-Ölgii, Govi-Altai, Hovd, Uvs, Zavkhan
+MN	+4804+11430	Asia/Choibalsan	Dornod, Sükhbaatar
+MO	+2214+11335	Asia/Macau
+MQ	+1436-06105	America/Martinique
+MT	+3554+01431	Europe/Malta
+MU	-2010+05730	Indian/Mauritius
+MV	+0410+07330	Indian/Maldives
+MX	+1924-09909	America/Mexico_City	Central Time - most locations
+MX	+2105-08646	America/Cancun	Central Time - Quintana Roo
+MX	+2058-08937	America/Merida	Central Time - Campeche, Yucatán
+MX	+2540-10019	America/Monterrey	Mexican Central Time - Coahuila, Durango, Nuevo León, Tamaulipas away from US border
+MX	+2550-09730	America/Matamoros	US Central Time - Coahuila, Durango, Nuevo León, Tamaulipas near US border
+MX	+2313-10625	America/Mazatlan	Mountain Time - S Baja, Nayarit, Sinaloa
+MX	+2838-10605	America/Chihuahua	Mexican Mountain Time - Chihuahua away from US border
+MX	+2934-10425	America/Ojinaga	US Mountain Time - Chihuahua near US border
+MX	+2904-11058	America/Hermosillo	Mountain Standard Time - Sonora
+MX	+3232-11701	America/Tijuana	US Pacific Time - Baja California near US border
+MX	+3018-11452	America/Santa_Isabel	Mexican Pacific Time - Baja California away from US border
+MX	+2048-10515	America/Bahia_Banderas	Mexican Central Time - Bahía de Banderas
+MY	+0310+10142	Asia/Kuala_Lumpur	peninsular Malaysia
+MY	+0133+11020	Asia/Kuching	Sabah & Sarawak
+MZ,BI,BW,CD,MW,RW,ZM,ZW	-2558+03235	Africa/Maputo	Central Africa Time (UTC+2)
+NA	-2234+01706	Africa/Windhoek
+NC	-2216+16627	Pacific/Noumea
+NF	-2903+16758	Pacific/Norfolk
+NG,AO,BJ,CD,CF,CG,CM,GA,GQ,NE	+0627+00324	Africa/Lagos	West Africa Time (UTC+1)
+NI	+1209-08617	America/Managua
+NL	+5222+00454	Europe/Amsterdam
+NO,SJ	+5955+01045	Europe/Oslo
+NP	+2743+08519	Asia/Kathmandu
+NR	-0031+16655	Pacific/Nauru
+NU	-1901-16955	Pacific/Niue
+NZ,AQ	-3652+17446	Pacific/Auckland	New Zealand time
+NZ	-4357-17633	Pacific/Chatham	Chatham Islands
+PA,KY	+0858-07932	America/Panama
+PE	-1203-07703	America/Lima
+PF	-1732-14934	Pacific/Tahiti	Society Islands
+PF	-0900-13930	Pacific/Marquesas	Marquesas Islands
+PF	-2308-13457	Pacific/Gambier	Gambier Islands
+PG	-0930+14710	Pacific/Port_Moresby	most locations
+PG	-0613+15534	Pacific/Bougainville	Bougainville
+PH	+1435+12100	Asia/Manila
+PK	+2452+06703	Asia/Karachi
+PL	+5215+02100	Europe/Warsaw
+PM	+4703-05620	America/Miquelon
+PN	-2504-13005	Pacific/Pitcairn
+PR	+182806-0660622	America/Puerto_Rico
+PS	+3130+03428	Asia/Gaza	Gaza Strip
+PS	+313200+0350542	Asia/Hebron	West Bank
+PT	+3843-00908	Europe/Lisbon	mainland
+PT	+3238-01654	Atlantic/Madeira	Madeira Islands
+PT	+3744-02540	Atlantic/Azores	Azores
+PW	+0720+13429	Pacific/Palau
+PY	-2516-05740	America/Asuncion
+QA,BH	+2517+05132	Asia/Qatar
+RE,TF	-2052+05528	Indian/Reunion	Réunion, Crozet Is, Scattered Is
+RO	+4426+02606	Europe/Bucharest
+RS,BA,HR,ME,MK,SI	+4450+02030	Europe/Belgrade
+RU	+5443+02030	Europe/Kaliningrad	Moscow-01 - Kaliningrad
+RU	+554521+0373704	Europe/Moscow	Moscow+00 - west Russia
+RU	+4457+03406	Europe/Simferopol	Moscow+00 - Crimea
+RU	+4844+04425	Europe/Volgograd	Moscow+00 - Caspian Sea
+RU	+5312+05009	Europe/Samara	Moscow+00 (Moscow+01 after 2014-10-26) - Samara, Udmurtia
+RU	+5651+06036	Asia/Yekaterinburg	Moscow+02 - Urals
+RU	+5500+07324	Asia/Omsk	Moscow+03 - west Siberia
+RU	+5502+08255	Asia/Novosibirsk	Moscow+03 - Novosibirsk
+RU	+5345+08707	Asia/Novokuznetsk	Moscow+03 (Moscow+04 after 2014-10-26) - Kemerovo
+RU	+5601+09250	Asia/Krasnoyarsk	Moscow+04 - Yenisei River
+RU	+5216+10420	Asia/Irkutsk	Moscow+05 - Lake Baikal
+RU	+5203+11328	Asia/Chita	Moscow+06 (Moscow+05 after 2014-10-26) - Zabaykalsky
+RU	+6200+12940	Asia/Yakutsk	Moscow+06 - Lena River
+RU	+623923+1353314	Asia/Khandyga	Moscow+06 - Tomponsky, Ust-Maysky
+RU	+4310+13156	Asia/Vladivostok	Moscow+07 - Amur River
+RU	+4658+14242	Asia/Sakhalin	Moscow+07 - Sakhalin Island
+RU	+643337+1431336	Asia/Ust-Nera	Moscow+07 - Oymyakonsky
+RU	+5934+15048	Asia/Magadan	Moscow+08 (Moscow+07 after 2014-10-26) - Magadan
+RU	+6728+15343	Asia/Srednekolymsk	Moscow+08 - E Sakha, N Kuril Is
+RU	+5301+15839	Asia/Kamchatka	Moscow+08 (Moscow+09 after 2014-10-26) - Kamchatka
+RU	+6445+17729	Asia/Anadyr	Moscow+08 (Moscow+09 after 2014-10-26) - Bering Sea
+SA,KW,YE	+2438+04643	Asia/Riyadh
+SB	-0932+16012	Pacific/Guadalcanal
+SC	-0440+05528	Indian/Mahe
+SD,SS	+1536+03232	Africa/Khartoum
+SE	+5920+01803	Europe/Stockholm
+SG	+0117+10351	Asia/Singapore
+SR	+0550-05510	America/Paramaribo
+SV	+1342-08912	America/El_Salvador
+SY	+3330+03618	Asia/Damascus
+TC	+2128-07108	America/Grand_Turk
+TD	+1207+01503	Africa/Ndjamena
+TF	-492110+0701303	Indian/Kerguelen	Kerguelen, St Paul I, Amsterdam I
+TH,KH,LA,VN	+1345+10031	Asia/Bangkok	most of Indochina
+TJ	+3835+06848	Asia/Dushanbe
+TK	-0922-17114	Pacific/Fakaofo
+TL	-0833+12535	Asia/Dili
+TM	+3757+05823	Asia/Ashgabat
+TN	+3648+01011	Africa/Tunis
+TO	-2110-17510	Pacific/Tongatapu
+TR	+4101+02858	Europe/Istanbul
+TT,AG,AI,BL,DM,GD,GP,KN,LC,MF,MS,VC,VG,VI	+1039-06131	America/Port_of_Spain
+TV	-0831+17913	Pacific/Funafuti
+TW	+2503+12130	Asia/Taipei
+UA	+5026+03031	Europe/Kiev	most locations
+UA	+4837+02218	Europe/Uzhgorod	Ruthenia
+UA	+4750+03510	Europe/Zaporozhye	Zaporozh'ye, E Lugansk / Zaporizhia, E Luhansk
+UM	+1917+16637	Pacific/Wake	Wake Island
+US	+404251-0740023	America/New_York	Eastern Time
+US	+421953-0830245	America/Detroit	Eastern Time - Michigan - most locations
+US	+381515-0854534	America/Kentucky/Louisville	Eastern Time - Kentucky - Louisville area
+US	+364947-0845057	America/Kentucky/Monticello	Eastern Time - Kentucky - Wayne County
+US	+394606-0860929	America/Indiana/Indianapolis	Eastern Time - Indiana - most locations
+US	+384038-0873143	America/Indiana/Vincennes	Eastern Time - Indiana - Daviess, Dubois, Knox & Martin Counties
+US	+410305-0863611	America/Indiana/Winamac	Eastern Time - Indiana - Pulaski County
+US	+382232-0862041	America/Indiana/Marengo	Eastern Time - Indiana - Crawford County
+US	+382931-0871643	America/Indiana/Petersburg	Eastern Time - Indiana - Pike County
+US	+384452-0850402	America/Indiana/Vevay	Eastern Time - Indiana - Switzerland County
+US	+415100-0873900	America/Chicago	Central Time
+US	+375711-0864541	America/Indiana/Tell_City	Central Time - Indiana - Perry County
+US	+411745-0863730	America/Indiana/Knox	Central Time - Indiana - Starke County
+US	+450628-0873651	America/Menominee	Central Time - Michigan - Dickinson, Gogebic, Iron & Menominee Counties
+US	+470659-1011757	America/North_Dakota/Center	Central Time - North Dakota - Oliver County
+US	+465042-1012439	America/North_Dakota/New_Salem	Central Time - North Dakota - Morton County (except Mandan area)
+US	+471551-1014640	America/North_Dakota/Beulah	Central Time - North Dakota - Mercer County
+US	+394421-1045903	America/Denver	Mountain Time
+US	+433649-1161209	America/Boise	Mountain Time - south Idaho & east Oregon
+US	+332654-1120424	America/Phoenix	Mountain Standard Time - Arizona (except Navajo)
+US	+340308-1181434	America/Los_Angeles	Pacific Time
+US	+550737-1313435	America/Metlakatla	Pacific Standard Time - Annette Island, Alaska
+US	+611305-1495401	America/Anchorage	Alaska Time
+US	+581807-1342511	America/Juneau	Alaska Time - Alaska panhandle
+US	+571035-1351807	America/Sitka	Alaska Time - southeast Alaska panhandle
+US	+593249-1394338	America/Yakutat	Alaska Time - Alaska panhandle neck
+US	+643004-1652423	America/Nome	Alaska Time - west Alaska
+US	+515248-1763929	America/Adak	Aleutian Islands
+US,UM	+211825-1575130	Pacific/Honolulu	Hawaii time
+UY	-3453-05611	America/Montevideo
+UZ	+3940+06648	Asia/Samarkand	west Uzbekistan
+UZ	+4120+06918	Asia/Tashkent	east Uzbekistan
+VE	+1030-06656	America/Caracas
+VN	+1045+10640	Asia/Ho_Chi_Minh	south Vietnam
+VU	-1740+16825	Pacific/Efate
+WF	-1318-17610	Pacific/Wallis
+WS	-1350-17144	Pacific/Apia
+ZA,LS,SZ	-2615+02800	Africa/Johannesburg
diff --git a/awx/lib/site-packages/requests/__init__.py b/awx/lib/site-packages/requests/__init__.py
index bba190029e..ac2b06c86c 100644
--- a/awx/lib/site-packages/requests/__init__.py
+++ b/awx/lib/site-packages/requests/__init__.py
@@ -13,7 +13,7 @@ Requests is an HTTP library, written in Python, for human beings.
Basic GET usage: >>> import requests - >>> r = requests.get('http://python.org') + >>> r = requests.get('https://www.python.org') >>> r.status_code 200 >>> 'Python is a programming language' in r.content @@ -22,7 +22,7 @@ usage: ... or POST: >>> payload = dict(key1='value1', key2='value2') - >>> r = requests.post("http://httpbin.org/post", data=payload) + >>> r = requests.post('http://httpbin.org/post', data=payload) >>> print(r.text) { ... @@ -42,8 +42,8 @@ is at <http://python-requests.org>. """ __title__ = 'requests' -__version__ = '2.3.0' -__build__ = 0x020300 +__version__ = '2.5.1' +__build__ = 0x020501 __author__ = 'Kenneth Reitz' __license__ = 'Apache 2.0' __copyright__ = 'Copyright 2014 Kenneth Reitz' diff --git a/awx/lib/site-packages/requests/adapters.py b/awx/lib/site-packages/requests/adapters.py index eb7a2d282f..c892853b29 100644 --- a/awx/lib/site-packages/requests/adapters.py +++ b/awx/lib/site-packages/requests/adapters.py @@ -11,20 +11,25 @@ and maintain connections. import socket from .models import Response +from .packages.urllib3 import Retry from .packages.urllib3.poolmanager import PoolManager, proxy_from_url from .packages.urllib3.response import HTTPResponse from .packages.urllib3.util import Timeout as TimeoutSauce -from .compat import urlparse, basestring, urldefrag, unquote +from .compat import urlparse, basestring from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers, - prepend_scheme_if_needed, get_auth_from_url) + prepend_scheme_if_needed, get_auth_from_url, urldefragauth) from .structures import CaseInsensitiveDict -from .packages.urllib3.exceptions import MaxRetryError -from .packages.urllib3.exceptions import TimeoutError -from .packages.urllib3.exceptions import SSLError as _SSLError +from .packages.urllib3.exceptions import ConnectTimeoutError from .packages.urllib3.exceptions import HTTPError as _HTTPError +from .packages.urllib3.exceptions import MaxRetryError from .packages.urllib3.exceptions import ProxyError as _ProxyError +from .packages.urllib3.exceptions import ProtocolError +from .packages.urllib3.exceptions import ReadTimeoutError +from .packages.urllib3.exceptions import SSLError as _SSLError +from .packages.urllib3.exceptions import ResponseError from .cookies import extract_cookies_to_jar -from .exceptions import ConnectionError, Timeout, SSLError, ProxyError +from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError, + ProxyError, RetryError) from .auth import _basic_auth_str DEFAULT_POOLBLOCK = False @@ -56,8 +61,12 @@ class HTTPAdapter(BaseAdapter): :param pool_connections: The number of urllib3 connection pools to cache. :param pool_maxsize: The maximum number of connections to save in the pool. :param int max_retries: The maximum number of retries each connection - should attempt. Note, this applies only to failed connections and - timeouts, never to requests where the server returns a response. + should attempt. Note, this applies only to failed DNS lookups, socket + connections and connection timeouts, never to requests where data has + made it to the server. By default, Requests does not retry failed + connections. If you need granular control over the conditions under + which we retry a request, import urllib3's ``Retry`` class and pass + that instead. :param pool_block: Whether the connection pool should block for connections. 
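The expanded max_retries docstring above tells users to pass a urllib3 Retry object when they need granular control, and __init__ now maps the old integer default to Retry(0, read=False). A calling-code sketch (host and policy values are illustrative, not part of this commit):

import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3 import Retry  # re-exported by the vendored urllib3

session = requests.Session()
# Retry a few idempotent failures, including selected gateway errors.
retries = Retry(total=3, backoff_factor=0.5,
                status_forcelist=[502, 503, 504])
session.mount('https://', HTTPAdapter(max_retries=retries))

resp = session.get('https://www.python.org')  # governed by the policy above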
Usage:: @@ -73,7 +82,10 @@ class HTTPAdapter(BaseAdapter): def __init__(self, pool_connections=DEFAULT_POOLSIZE, pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES, pool_block=DEFAULT_POOLBLOCK): - self.max_retries = max_retries + if max_retries == DEFAULT_RETRIES: + self.max_retries = Retry(0, read=False) + else: + self.max_retries = Retry.from_int(max_retries) self.config = {} self.proxy_manager = {} @@ -101,14 +113,17 @@ class HTTPAdapter(BaseAdapter): self.init_poolmanager(self._pool_connections, self._pool_maxsize, block=self._pool_block) - def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK): - """Initializes a urllib3 PoolManager. This method should not be called - from user code, and is only exposed for use when subclassing the + def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs): + """Initializes a urllib3 PoolManager. + + This method should not be called from user code, and is only + exposed for use when subclassing the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. :param connections: The number of urllib3 connection pools to cache. :param maxsize: The maximum number of connections to save in the pool. :param block: Block when no free connections are available. + :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager. """ # save these values for pickling self._pool_connections = connections @@ -116,7 +131,30 @@ class HTTPAdapter(BaseAdapter): self._pool_block = block self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize, - block=block) + block=block, strict=True, **pool_kwargs) + + def proxy_manager_for(self, proxy, **proxy_kwargs): + """Return urllib3 ProxyManager for the given proxy. + + This method should not be called from user code, and is only + exposed for use when subclassing the + :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. + + :param proxy: The proxy to return a urllib3 ProxyManager for. + :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager. + :returns: ProxyManager + """ + if not proxy in self.proxy_manager: + proxy_headers = self.proxy_headers(proxy) + self.proxy_manager[proxy] = proxy_from_url( + proxy, + proxy_headers=proxy_headers, + num_pools=self._pool_connections, + maxsize=self._pool_maxsize, + block=self._pool_block, + **proxy_kwargs) + + return self.proxy_manager[proxy] def cert_verify(self, conn, url, verify, cert): """Verify a SSL certificate. This method should not be called from user @@ -204,17 +242,8 @@ class HTTPAdapter(BaseAdapter): if proxy: proxy = prepend_scheme_if_needed(proxy, 'http') - proxy_headers = self.proxy_headers(proxy) - - if not proxy in self.proxy_manager: - self.proxy_manager[proxy] = proxy_from_url( - proxy, - proxy_headers=proxy_headers, - num_pools=self._pool_connections, - maxsize=self._pool_maxsize, - block=self._pool_block) - - conn = self.proxy_manager[proxy].connection_from_url(url) + proxy_manager = self.proxy_manager_for(proxy) + conn = proxy_manager.connection_from_url(url) else: # Only scheme should be lower case parsed = urlparse(url) @@ -249,7 +278,7 @@ class HTTPAdapter(BaseAdapter): proxy = proxies.get(scheme) if proxy and scheme != 'https': - url, _ = urldefrag(request.url) + url = urldefragauth(request.url) else: url = request.path_url @@ -296,7 +325,10 @@ class HTTPAdapter(BaseAdapter): :param request: The :class:`PreparedRequest <PreparedRequest>` being sent. :param stream: (optional) Whether to stream the request content. 
- :param timeout: (optional) The timeout on the request. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a (`connect timeout, read + timeout <user/advanced.html#timeouts>`_) tuple. + :type timeout: float or tuple :param verify: (optional) Whether to verify SSL certificates. :param cert: (optional) Any user-provided SSL certificate to be trusted. :param proxies: (optional) The proxies dictionary to apply to the request. @@ -310,7 +342,18 @@ class HTTPAdapter(BaseAdapter): chunked = not (request.body is None or 'Content-Length' in request.headers) - timeout = TimeoutSauce(connect=timeout, read=timeout) + if isinstance(timeout, tuple): + try: + connect, read = timeout + timeout = TimeoutSauce(connect=connect, read=read) + except ValueError as e: + # this may raise a string formatting error. + err = ("Invalid timeout {0}. Pass a (connect, read) " + "timeout tuple, or a single float to set " + "both timeouts to the same value".format(timeout)) + raise ValueError(err) + else: + timeout = TimeoutSauce(connect=timeout, read=timeout) try: if not chunked: @@ -368,10 +411,16 @@ class HTTPAdapter(BaseAdapter): # All is well, return the connection to the pool. conn._put_conn(low_conn) - except socket.error as sockerr: - raise ConnectionError(sockerr, request=request) + except (ProtocolError, socket.error) as err: + raise ConnectionError(err, request=request) except MaxRetryError as e: + if isinstance(e.reason, ConnectTimeoutError): + raise ConnectTimeout(e, request=request) + + if isinstance(e.reason, ResponseError): + raise RetryError(e, request=request) + raise ConnectionError(e, request=request) except _ProxyError as e: @@ -380,8 +429,8 @@ class HTTPAdapter(BaseAdapter): except (_SSLError, _HTTPError) as e: if isinstance(e, _SSLError): raise SSLError(e, request=request) - elif isinstance(e, TimeoutError): - raise Timeout(e, request=request) + elif isinstance(e, ReadTimeoutError): + raise ReadTimeout(e, request=request) else: raise diff --git a/awx/lib/site-packages/requests/api.py b/awx/lib/site-packages/requests/api.py index 01d853d5ca..1469b05c49 100644 --- a/awx/lib/site-packages/requests/api.py +++ b/awx/lib/site-packages/requests/api.py @@ -22,12 +22,17 @@ def request(method, url, **kwargs): :param url: URL for the new :class:`Request` object. :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. + :param json: (optional) json data to send in the body of the :class:`Request`. :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. - :param files: (optional) Dictionary of 'name': file-like-objects (or {'name': ('filename', fileobj)}) for multipart encoding upload. + :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': ('filename', fileobj)}``) for multipart encoding upload. :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. - :param timeout: (optional) Float describing the timeout of the request in seconds. + :param timeout: (optional) How long to wait for the server to send data + before giving up, as a float, or a (`connect timeout, read timeout + <user/advanced.html#timeouts>`_) tuple. + :type timeout: float or tuple :param allow_redirects: (optional) Boolean. 
Set to True if POST/PUT/DELETE redirect following is allowed. + :type allow_redirects: bool :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. :param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided. :param stream: (optional) if ``False``, the response content will be immediately downloaded. @@ -41,7 +46,12 @@ def request(method, url, **kwargs): """ session = sessions.Session() - return session.request(method=method, url=url, **kwargs) + response = session.request(method=method, url=url, **kwargs) + # By explicitly closing the session, we avoid leaving sockets open which + # can trigger a ResourceWarning in some cases, and look like a memory leak + # in others. + session.close() + return response def get(url, **kwargs): @@ -77,15 +87,16 @@ def head(url, **kwargs): return request('head', url, **kwargs) -def post(url, data=None, **kwargs): +def post(url, data=None, json=None, **kwargs): """Sends a POST request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. + :param json: (optional) json data to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. """ - return request('post', url, data=data, **kwargs) + return request('post', url, data=data, json=json, **kwargs) def put(url, data=None, **kwargs): diff --git a/awx/lib/site-packages/requests/auth.py b/awx/lib/site-packages/requests/auth.py index 9f831b7ad0..b950181d9e 100644 --- a/awx/lib/site-packages/requests/auth.py +++ b/awx/lib/site-packages/requests/auth.py @@ -16,7 +16,8 @@ from base64 import b64encode from .compat import urlparse, str from .cookies import extract_cookies_to_jar -from .utils import parse_dict_header +from .utils import parse_dict_header, to_native_string +from .status_codes import codes CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded' CONTENT_TYPE_MULTI_PART = 'multipart/form-data' @@ -25,7 +26,11 @@ CONTENT_TYPE_MULTI_PART = 'multipart/form-data' def _basic_auth_str(username, password): """Returns a Basic Auth string.""" - return 'Basic ' + b64encode(('%s:%s' % (username, password)).encode('latin1')).strip().decode('latin1') + authstr = 'Basic ' + to_native_string( + b64encode(('%s:%s' % (username, password)).encode('latin1')).strip() + ) + + return authstr class AuthBase(object): @@ -62,6 +67,7 @@ class HTTPDigestAuth(AuthBase): self.nonce_count = 0 self.chal = {} self.pos = None + self.num_401_calls = 1 def build_digest_header(self, method, url): @@ -146,6 +152,11 @@ class HTTPDigestAuth(AuthBase): return 'Digest %s' % (base) + def handle_redirect(self, r, **kwargs): + """Reset num_401_calls counter on redirects.""" + if r.is_redirect: + self.num_401_calls = 1 + def handle_401(self, r, **kwargs): """Takes the given response and tries digest-auth, if needed.""" @@ -158,7 +169,7 @@ class HTTPDigestAuth(AuthBase): if 'digest' in s_auth.lower() and num_401_calls < 2: - setattr(self, 'num_401_calls', num_401_calls + 1) + self.num_401_calls += 1 pat = re.compile(r'digest ', flags=re.IGNORECASE) self.chal = parse_dict_header(pat.sub('', s_auth, count=1)) @@ -178,7 +189,7 @@ class HTTPDigestAuth(AuthBase): return _r - setattr(self, 'num_401_calls', 1) + self.num_401_calls = 1 return r def __call__(self, r): @@ -188,6 +199,11 @@ class HTTPDigestAuth(AuthBase): try: self.pos = r.body.tell() except AttributeError: - pass + # In the 
case of HTTPDigestAuth being reused and the body of + # the previous request was a file-like object, pos has the + # file position of the previous body. Ensure it's set to + # None. + self.pos = None r.register_hook('response', self.handle_401) + r.register_hook('response', self.handle_redirect) return r diff --git a/awx/lib/site-packages/requests/certs.py b/awx/lib/site-packages/requests/certs.py index bc00826191..07e6475070 100644 --- a/awx/lib/site-packages/requests/certs.py +++ b/awx/lib/site-packages/requests/certs.py @@ -11,14 +11,15 @@ If you are packaging Requests, e.g., for a Linux distribution or a managed environment, you can change the definition of where() to return a separately packaged CA bundle. """ - import os.path - -def where(): - """Return the preferred certificate bundle.""" - # vendored bundle inside Requests - return os.path.join(os.path.dirname(__file__), 'cacert.pem') +try: + from certifi import where +except ImportError: + def where(): + """Return the preferred certificate bundle.""" + # vendored bundle inside Requests + return os.path.join(os.path.dirname(__file__), 'cacert.pem') if __name__ == '__main__': print(where()) diff --git a/awx/lib/site-packages/requests/compat.py b/awx/lib/site-packages/requests/compat.py index bdf10d6a9f..c07726ee45 100644 --- a/awx/lib/site-packages/requests/compat.py +++ b/awx/lib/site-packages/requests/compat.py @@ -75,7 +75,9 @@ is_solaris = ('solar==' in str(sys.platform).lower()) # Complete guess. try: import simplejson as json -except ImportError: +except (ImportError, SyntaxError): + # simplejson does not support Python 3.2, it throws a SyntaxError + # because of u'...' Unicode literals. import json # --------- @@ -90,7 +92,6 @@ if is_py2: from Cookie import Morsel from StringIO import StringIO from .packages.urllib3.packages.ordered_dict import OrderedDict - from httplib import IncompleteRead builtin_str = str bytes = str @@ -106,7 +107,6 @@ elif is_py3: from http.cookies import Morsel from io import StringIO from collections import OrderedDict - from http.client import IncompleteRead builtin_str = str str = str diff --git a/awx/lib/site-packages/requests/exceptions.py b/awx/lib/site-packages/requests/exceptions.py index a4ee9d630c..89135a802e 100644 --- a/awx/lib/site-packages/requests/exceptions.py +++ b/awx/lib/site-packages/requests/exceptions.py @@ -44,7 +44,23 @@ class SSLError(ConnectionError): class Timeout(RequestException): - """The request timed out.""" + """The request timed out. + + Catching this error will catch both + :exc:`~requests.exceptions.ConnectTimeout` and + :exc:`~requests.exceptions.ReadTimeout` errors. + """ + + +class ConnectTimeout(ConnectionError, Timeout): + """The request timed out while trying to connect to the remote server. + + Requests that produced this error are safe to retry. 
+ """ + + +class ReadTimeout(Timeout): + """The server did not send any data in the allotted amount of time.""" class URLRequired(RequestException): @@ -73,3 +89,11 @@ class ChunkedEncodingError(RequestException): class ContentDecodingError(RequestException, BaseHTTPError): """Failed to decode response content""" + + +class StreamConsumedError(RequestException, TypeError): + """The content for this response was already consumed""" + + +class RetryError(RequestException): + """Custom retries logic failed""" diff --git a/awx/lib/site-packages/requests/models.py b/awx/lib/site-packages/requests/models.py index 120968ff51..b728c84e41 100644 --- a/awx/lib/site-packages/requests/models.py +++ b/awx/lib/site-packages/requests/models.py @@ -19,31 +19,35 @@ from .cookies import cookiejar_from_dict, get_cookie_header from .packages.urllib3.fields import RequestField from .packages.urllib3.filepost import encode_multipart_formdata from .packages.urllib3.util import parse_url -from .packages.urllib3.exceptions import DecodeError +from .packages.urllib3.exceptions import ( + DecodeError, ReadTimeoutError, ProtocolError, LocationParseError) from .exceptions import ( - HTTPError, RequestException, MissingSchema, InvalidURL, - ChunkedEncodingError, ContentDecodingError) + HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError, + ContentDecodingError, ConnectionError, StreamConsumedError) from .utils import ( guess_filename, get_auth_from_url, requote_uri, stream_decode_response_unicode, to_key_val_list, parse_header_links, iter_slices, guess_json_utf, super_len, to_native_string) from .compat import ( cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO, - is_py2, chardet, json, builtin_str, basestring, IncompleteRead) + is_py2, chardet, json, builtin_str, basestring) from .status_codes import codes #: The set of HTTP status codes that indicate an automatically #: processable redirect. REDIRECT_STATI = ( - codes.moved, # 301 - codes.found, # 302 - codes.other, # 303 - codes.temporary_moved, # 307 + codes.moved, # 301 + codes.found, # 302 + codes.other, # 303 + codes.temporary_redirect, # 307 + codes.permanent_redirect, # 308 ) DEFAULT_REDIRECT_LIMIT = 30 CONTENT_CHUNK_SIZE = 10 * 1024 ITER_CHUNK_SIZE = 512 +json_dumps = json.dumps + class RequestEncodingMixin(object): @property @@ -187,7 +191,8 @@ class Request(RequestHooksMixin): :param url: URL to send. :param headers: dictionary of headers to send. :param files: dictionary of {filename: fileobject} files to multipart upload. - :param data: the body to attach the request. If a dictionary is provided, form-encoding will take place. + :param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place. + :param json: json for the body to attach to the request (if data is not specified). :param params: dictionary of URL parameters to append to the URL. :param auth: Auth handler or (user, pass) tuple. :param cookies: dictionary or CookieJar of cookies to attach to this request. @@ -210,7 +215,8 @@ class Request(RequestHooksMixin): params=None, auth=None, cookies=None, - hooks=None): + hooks=None, + json=None): # Default empty dicts for dict params. 
data = [] if data is None else data @@ -228,6 +234,7 @@ class Request(RequestHooksMixin): self.headers = headers self.files = files self.data = data + self.json = json self.params = params self.auth = auth self.cookies = cookies @@ -244,6 +251,7 @@ class Request(RequestHooksMixin): headers=self.headers, files=self.files, data=self.data, + json=self.json, params=self.params, auth=self.auth, cookies=self.cookies, @@ -287,14 +295,15 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): self.hooks = default_hooks() def prepare(self, method=None, url=None, headers=None, files=None, - data=None, params=None, auth=None, cookies=None, hooks=None): + data=None, params=None, auth=None, cookies=None, hooks=None, + json=None): """Prepares the entire request with the given parameters.""" self.prepare_method(method) self.prepare_url(url, params) self.prepare_headers(headers) self.prepare_cookies(cookies) - self.prepare_body(data, files) + self.prepare_body(data, files, json) self.prepare_auth(auth, url) # Note that prepare_auth must be last to enable authentication schemes # such as OAuth to work on a fully prepared request. @@ -309,8 +318,8 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): p = PreparedRequest() p.method = self.method p.url = self.url - p.headers = self.headers.copy() - p._cookies = self._cookies.copy() + p.headers = self.headers.copy() if self.headers is not None else None + p._cookies = self._cookies.copy() if self._cookies is not None else None p.body = self.body p.hooks = self.hooks return p @@ -324,21 +333,27 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): def prepare_url(self, url, params): """Prepares the given HTTP URL.""" #: Accept objects that have string representations. - try: - url = unicode(url) - except NameError: - # We're on Python 3. - url = str(url) - except UnicodeDecodeError: - pass + #: We're unable to blindy call unicode/str functions + #: as this will include the bytestring indicator (b'') + #: on python 3.x. + #: https://github.com/kennethreitz/requests/pull/2238 + if isinstance(url, bytes): + url = url.decode('utf8') + else: + url = unicode(url) if is_py2 else str(url) - # Don't do any URL preparation for oddball schemes + # Don't do any URL preparation for non-HTTP schemes like `mailto`, + # `data` etc to work around exceptions from `url_parse`, which + # handles RFC 3986 only. if ':' in url and not url.lower().startswith('http'): self.url = url return # Support for unicode domain names and paths. - scheme, auth, host, port, path, query, fragment = parse_url(url) + try: + scheme, auth, host, port, path, query, fragment = parse_url(url) + except LocationParseError as e: + raise InvalidURL(*e.args) if not scheme: raise MissingSchema("Invalid URL {0!r}: No schema supplied. " @@ -395,7 +410,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): else: self.headers = CaseInsensitiveDict() - def prepare_body(self, data, files): + def prepare_body(self, data, files, json=None): """Prepares the given HTTP body data.""" # Check if file, fo, generator, iterator. 
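Several features added earlier in this diff meet here: the json= body parameter now threaded through Request (serialized in the prepare_body hunk that follows, with Content-Type set to application/json), the (connect, read) timeout tuple from adapters.py, and the ConnectTimeout/ReadTimeout split from exceptions.py. A combined usage sketch (URL and values are illustrative, not part of this commit):

import requests
from requests.exceptions import ConnectTimeout, ReadTimeout

try:
    resp = requests.post('http://httpbin.org/post',
                         json={'key1': 'value1'},  # body is JSON-encoded for you
                         timeout=(3.05, 27))       # 3.05s to connect, 27s per read
    print(resp.status_code)
except ConnectTimeout:
    print('never connected; per the docstring above, safe to retry')
except ReadTimeout:
    print('connected, but the server sent no data in time')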
@@ -406,6 +421,10 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): content_type = None length = None + if json is not None: + content_type = 'application/json' + body = json_dumps(json) + is_stream = all([ hasattr(data, '__iter__'), not isinstance(data, (basestring, list, tuple, dict)) @@ -431,9 +450,9 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): if files: (body, content_type) = self._encode_files(files, data) else: - if data: + if data and json is None: body = self._encode_params(data) - if isinstance(data, str) or isinstance(data, builtin_str) or hasattr(data, 'read'): + if isinstance(data, basestring) or hasattr(data, 'read'): content_type = None else: content_type = 'application/x-www-form-urlencoded' @@ -441,7 +460,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): self.prepare_content_length(body) # Add content-type if it wasn't explicitly provided. - if (content_type) and (not 'content-type' in self.headers): + if content_type and ('content-type' not in self.headers): self.headers['Content-Type'] = content_type self.body = body @@ -455,7 +474,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): l = super_len(body) if l: self.headers['Content-Length'] = builtin_str(l) - elif self.method not in ('GET', 'HEAD'): + elif (self.method not in ('GET', 'HEAD')) and (self.headers.get('Content-Length') is None): self.headers['Content-Length'] = '0' def prepare_auth(self, auth, url=''): @@ -556,6 +575,10 @@ class Response(object): #: and the arrival of the response (as a timedelta) self.elapsed = datetime.timedelta(0) + #: The :class:`PreparedRequest <PreparedRequest>` object to which this + #: is a response. + self.request = None + def __getstate__(self): # Consume everything; accessing the content attribute makes # sure the content has been fully read. @@ -594,7 +617,7 @@ class Response(object): def ok(self): try: self.raise_for_status() - except RequestException: + except HTTPError: return False return True @@ -605,6 +628,11 @@ class Response(object): """ return ('location' in self.headers and self.status_code in REDIRECT_STATI) + @property + def is_permanent_redirect(self): + """True if this Response one of the permanant versions of redirect""" + return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect)) + @property def apparent_encoding(self): """The apparent encoding, provided by the chardet library""" @@ -626,10 +654,12 @@ class Response(object): try: for chunk in self.raw.stream(chunk_size, decode_content=True): yield chunk - except IncompleteRead as e: + except ProtocolError as e: raise ChunkedEncodingError(e) except DecodeError as e: raise ContentDecodingError(e) + except ReadTimeoutError as e: + raise ConnectionError(e) except AttributeError: # Standard file-like object. while True: @@ -640,6 +670,8 @@ class Response(object): self._content_consumed = True + if self._content_consumed and isinstance(self._content, bool): + raise StreamConsumedError() # simulate reading small chunks of the content reused_chunks = iter_slices(self._content, chunk_size) @@ -652,7 +684,7 @@ class Response(object): return chunks - def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None): + def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None): """Iterates over the response data, one line at a time. When stream=True is set on the request, this avoids reading the content at once into memory for large responses. 
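iter_lines() also grows an optional delimiter argument in this release (its splitting logic lands in the next hunk), so callers can break a streamed body on a custom record separator instead of universal newlines. A streaming sketch (URL and delimiter are illustrative, not part of this commit):

import requests

resp = requests.get('http://httpbin.org/stream/20', stream=True)
for record in resp.iter_lines(delimiter=b'\n'):  # bytes delimiter, since we are not decoding
    if record:  # iter_lines can yield empty chunks between delimiters
        print(record)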
@@ -664,7 +696,11 @@ class Response(object): if pending is not None: chunk = pending + chunk - lines = chunk.splitlines() + + if delimiter: + lines = chunk.split(delimiter) + else: + lines = chunk.splitlines() if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]: pending = lines.pop() diff --git a/awx/lib/site-packages/requests/packages/charade/__init__.py b/awx/lib/site-packages/requests/packages/charade/__init__.py deleted file mode 100644 index 26362e9739..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/__init__.py +++ /dev/null @@ -1,66 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -__version__ = "1.0.3" -from sys import version_info - - -def detect(aBuf): - if ((version_info < (3, 0) and isinstance(aBuf, unicode)) or - (version_info >= (3, 0) and not isinstance(aBuf, bytes))): - raise ValueError('Expected a bytes object, not a unicode object') - - from . import universaldetector - u = universaldetector.UniversalDetector() - u.reset() - u.feed(aBuf) - u.close() - return u.result - -def _description_of(path): - """Return a string describing the probable encoding of a file.""" - from charade.universaldetector import UniversalDetector - - u = UniversalDetector() - for line in open(path, 'rb'): - u.feed(line) - u.close() - result = u.result - if result['encoding']: - return '%s: %s with confidence %s' % (path, - result['encoding'], - result['confidence']) - else: - return '%s: no result' % path - - -def charade_cli(): - """ - Script which takes one or more file paths and reports on their detected - encodings - - Example:: - - % chardetect.py somefile someotherfile - somefile: windows-1252 with confidence 0.5 - someotherfile: ascii with confidence 1.0 - - """ - from sys import argv - for path in argv[1:]: - print(_description_of(path)) - \ No newline at end of file diff --git a/awx/lib/site-packages/requests/packages/charade/__main__.py b/awx/lib/site-packages/requests/packages/charade/__main__.py deleted file mode 100644 index c0d587fa9e..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/__main__.py +++ /dev/null @@ -1,7 +0,0 @@ -''' -support ';python -m charade <file1> [file2] ...' package execution syntax (2.7+) -''' - -from charade import charade_cli - -charade_cli() diff --git a/awx/lib/site-packages/requests/packages/charade/big5freq.py b/awx/lib/site-packages/requests/packages/charade/big5freq.py deleted file mode 100644 index 65bffc04b0..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/big5freq.py +++ /dev/null @@ -1,925 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. 
-# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# Big5 frequency table -# by Taiwan's Mandarin Promotion Council -# <http://www.edu.tw:81/mandr/> -# -# 128 --> 0.42261 -# 256 --> 0.57851 -# 512 --> 0.74851 -# 1024 --> 0.89384 -# 2048 --> 0.97583 -# -# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98 -# Random Distribution Ration = 512/(5401-512)=0.105 -# -# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR - -BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75 - -#Char to FreqOrder table -BIG5_TABLE_SIZE = 5376 - -Big5CharToFreqOrder = ( - 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16 -3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32 -1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48 - 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64 -3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80 -4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96 -5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112 - 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128 - 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144 - 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160 -2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176 -1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192 -3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208 - 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224 -1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240 -3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256 -2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272 - 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288 -3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304 -1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320 -5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336 - 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352 -5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 
483,1474,1029, # 368 -1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384 - 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400 - 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416 -3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432 -3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448 - 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464 -2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480 -2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496 - 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512 - 287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528 -3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544 -1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560 -1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576 -1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592 -2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608 - 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624 -4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640 -1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656 -5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672 -2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688 - 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704 - 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720 - 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736 - 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752 -5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768 - 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784 -1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800 - 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816 - 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832 -5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848 -1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864 - 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880 -3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896 -4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912 -3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928 - 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944 - 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960 -1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976 -4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992 -3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008 -3827,3213,1464, 522,1119, 
661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024 -2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040 -5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056 -3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072 -5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088 -1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104 -2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120 -1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136 - 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152 -1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168 -4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184 -3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200 - 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216 - 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232 - 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248 -2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264 -5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280 -1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296 -2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312 -1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328 -1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344 -5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360 -5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376 -5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392 -3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408 -4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424 -4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440 -2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456 -5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472 -3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488 - 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504 -5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520 -5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536 -1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552 -2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568 -3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584 -4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600 -5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616 -3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632 -4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648 -1902,2188,5185, 
739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664 -1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680 -4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696 -1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712 - 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728 -1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744 -1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760 -3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776 - 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792 -5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808 -2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824 -1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840 -1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856 -5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872 - 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888 -4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904 - 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920 -2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936 - 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952 -1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968 -1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984 - 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000 -4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016 -4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032 -1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048 -3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064 -5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080 -5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096 -1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112 -2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128 -1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144 -3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160 -2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176 -3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192 -2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208 -4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224 -4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240 -3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256 - 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272 -3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288 - 424,1996, 
861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304 -3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320 -4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336 -3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352 -1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368 -5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384 - 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400 -5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416 -1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432 - 391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448 -4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464 -4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480 - 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496 -2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512 -2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528 -3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544 -1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560 -4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576 -2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592 -1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608 -1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624 -2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640 -3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656 -1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672 -5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688 -1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704 -4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720 -1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736 - 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752 -1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768 -4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784 -4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800 -2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816 -1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832 -4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848 - 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864 -5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880 -2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896 -3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912 -4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928 - 
790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944 -5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960 -5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976 -1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992 -4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008 -4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024 -2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040 -3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056 -3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072 -2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088 -1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104 -4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120 -3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136 -3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152 -2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168 -4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184 -5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200 -3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216 -2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232 -3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248 -1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264 -2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280 -3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296 -4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312 -2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328 -2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344 -5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360 -1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376 -2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392 -1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408 -3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424 -4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440 -2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456 -3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472 -3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488 -2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504 -4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520 -2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536 -3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552 -4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568 
-5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584 -3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600 - 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616 -1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632 -4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648 -1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664 -4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680 -5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696 - 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712 -5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728 -5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744 -2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760 -3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776 -2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792 -2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808 - 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824 -1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840 -4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856 -3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872 -3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888 - 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904 -2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920 - 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936 -2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952 -4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968 -1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984 -4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000 -1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016 -3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032 - 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048 -3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064 -5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080 -5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096 -3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112 -3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128 -1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144 -2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160 -5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176 -1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192 -1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 
4208 -3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224 - 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240 -1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256 -4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272 -5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288 -2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304 -3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320 - 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336 -1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352 -2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368 -2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384 -5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400 -5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416 -5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432 -2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448 -2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464 -1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480 -4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496 -3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512 -3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528 -4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544 -4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560 -2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576 -2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592 -5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608 -4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624 -5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640 -4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656 - 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672 - 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688 -1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704 -3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720 -4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736 -1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752 -5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768 -2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784 -2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800 -3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816 -5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832 -1605,1835,5671,2748, 398,3656,4413,3926,4138, 
328,1913,2860,4139,3927,1331,4414, # 4848 -3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864 -5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880 -1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896 -5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912 -2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928 -3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944 -2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960 -3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976 -3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992 -3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008 -4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024 - 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040 -2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056 -4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072 -3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088 -5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104 -1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120 -5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136 - 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152 -1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168 - 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184 -4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200 -1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216 -4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232 -1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248 - 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264 -3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280 -4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296 -5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312 - 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328 -3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344 - 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360 -2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 #last 512 -#Everything below is of no interest for detection purpose -2522,1613,4812,5799,3345,3945,2523,5800,4162,5801,1637,4163,2471,4813,3946,5802, # 5392 -2500,3034,3800,5803,5804,2195,4814,5805,2163,5806,5807,5808,5809,5810,5811,5812, # 5408 -5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828, # 5424 -5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844, # 5440 -5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856,5857,5858,5859,5860, # 5456 
-5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872,5873,5874,5875,5876, # 5472 -5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888,5889,5890,5891,5892, # 5488 -5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905,5906,5907,5908, # 5504 -5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920,5921,5922,5923,5924, # 5520 -5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936,5937,5938,5939,5940, # 5536 -5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952,5953,5954,5955,5956, # 5552 -5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968,5969,5970,5971,5972, # 5568 -5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984,5985,5986,5987,5988, # 5584 -5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004, # 5600 -6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020, # 5616 -6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032,6033,6034,6035,6036, # 5632 -6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052, # 5648 -6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068, # 5664 -6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084, # 5680 -6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100, # 5696 -6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116, # 5712 -6117,6118,6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,6132, # 5728 -6133,6134,6135,6136,6137,6138,6139,6140,6141,6142,6143,6144,6145,6146,6147,6148, # 5744 -6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163,6164, # 5760 -6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179,6180, # 5776 -6181,6182,6183,6184,6185,6186,6187,6188,6189,6190,6191,6192,6193,6194,6195,6196, # 5792 -6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210,6211,6212, # 5808 -6213,6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,3670,6224,6225,6226,6227, # 5824 -6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241,6242,6243, # 5840 -6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,6254,6255,6256,6257,6258,6259, # 5856 -6260,6261,6262,6263,6264,6265,6266,6267,6268,6269,6270,6271,6272,6273,6274,6275, # 5872 -6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,4815,6286,6287,6288,6289,6290, # 5888 -6291,6292,4816,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,6303,6304,6305, # 5904 -6306,6307,6308,6309,6310,6311,4817,4818,6312,6313,6314,6315,6316,6317,6318,4819, # 5920 -6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333,6334, # 5936 -6335,6336,6337,4820,6338,6339,6340,6341,6342,6343,6344,6345,6346,6347,6348,6349, # 5952 -6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363,6364,6365, # 5968 -6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379,6380,6381, # 5984 -6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395,6396,6397, # 6000 -6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,3441,6411,6412, # 6016 -6413,6414,6415,6416,6417,6418,6419,6420,6421,6422,6423,6424,6425,4440,6426,6427, # 6032 -6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,6439,6440,6441,6442,6443, # 6048 -6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,4821,6455,6456,6457,6458, # 6064 -6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472,6473,6474, # 6080 -6475,6476,6477,3947,3948,6478,6479,6480,6481,3272,4441,6482,6483,6484,6485,4442, # 
6096 -6486,6487,6488,6489,6490,6491,6492,6493,6494,6495,6496,4822,6497,6498,6499,6500, # 6112 -6501,6502,6503,6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516, # 6128 -6517,6518,6519,6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532, # 6144 -6533,6534,6535,6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548, # 6160 -6549,6550,6551,6552,6553,6554,6555,6556,2784,6557,4823,6558,6559,6560,6561,6562, # 6176 -6563,6564,6565,6566,6567,6568,6569,3949,6570,6571,6572,4824,6573,6574,6575,6576, # 6192 -6577,6578,6579,6580,6581,6582,6583,4825,6584,6585,6586,3950,2785,6587,6588,6589, # 6208 -6590,6591,6592,6593,6594,6595,6596,6597,6598,6599,6600,6601,6602,6603,6604,6605, # 6224 -6606,6607,6608,6609,6610,6611,6612,4826,6613,6614,6615,4827,6616,6617,6618,6619, # 6240 -6620,6621,6622,6623,6624,6625,4164,6626,6627,6628,6629,6630,6631,6632,6633,6634, # 6256 -3547,6635,4828,6636,6637,6638,6639,6640,6641,6642,3951,2984,6643,6644,6645,6646, # 6272 -6647,6648,6649,4165,6650,4829,6651,6652,4830,6653,6654,6655,6656,6657,6658,6659, # 6288 -6660,6661,6662,4831,6663,6664,6665,6666,6667,6668,6669,6670,6671,4166,6672,4832, # 6304 -3952,6673,6674,6675,6676,4833,6677,6678,6679,4167,6680,6681,6682,3198,6683,6684, # 6320 -6685,6686,6687,6688,6689,6690,6691,6692,6693,6694,6695,6696,6697,4834,6698,6699, # 6336 -6700,6701,6702,6703,6704,6705,6706,6707,6708,6709,6710,6711,6712,6713,6714,6715, # 6352 -6716,6717,6718,6719,6720,6721,6722,6723,6724,6725,6726,6727,6728,6729,6730,6731, # 6368 -6732,6733,6734,4443,6735,6736,6737,6738,6739,6740,6741,6742,6743,6744,6745,4444, # 6384 -6746,6747,6748,6749,6750,6751,6752,6753,6754,6755,6756,6757,6758,6759,6760,6761, # 6400 -6762,6763,6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777, # 6416 -6778,6779,6780,6781,4168,6782,6783,3442,6784,6785,6786,6787,6788,6789,6790,6791, # 6432 -4169,6792,6793,6794,6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806, # 6448 -6807,6808,6809,6810,6811,4835,6812,6813,6814,4445,6815,6816,4446,6817,6818,6819, # 6464 -6820,6821,6822,6823,6824,6825,6826,6827,6828,6829,6830,6831,6832,6833,6834,6835, # 6480 -3548,6836,6837,6838,6839,6840,6841,6842,6843,6844,6845,6846,4836,6847,6848,6849, # 6496 -6850,6851,6852,6853,6854,3953,6855,6856,6857,6858,6859,6860,6861,6862,6863,6864, # 6512 -6865,6866,6867,6868,6869,6870,6871,6872,6873,6874,6875,6876,6877,3199,6878,6879, # 6528 -6880,6881,6882,4447,6883,6884,6885,6886,6887,6888,6889,6890,6891,6892,6893,6894, # 6544 -6895,6896,6897,6898,6899,6900,6901,6902,6903,6904,4170,6905,6906,6907,6908,6909, # 6560 -6910,6911,6912,6913,6914,6915,6916,6917,6918,6919,6920,6921,6922,6923,6924,6925, # 6576 -6926,6927,4837,6928,6929,6930,6931,6932,6933,6934,6935,6936,3346,6937,6938,4838, # 6592 -6939,6940,6941,4448,6942,6943,6944,6945,6946,4449,6947,6948,6949,6950,6951,6952, # 6608 -6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966,6967,6968, # 6624 -6969,6970,6971,6972,6973,6974,6975,6976,6977,6978,6979,6980,6981,6982,6983,6984, # 6640 -6985,6986,6987,6988,6989,6990,6991,6992,6993,6994,3671,6995,6996,6997,6998,4839, # 6656 -6999,7000,7001,7002,3549,7003,7004,7005,7006,7007,7008,7009,7010,7011,7012,7013, # 6672 -7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027,7028,7029, # 6688 -7030,4840,7031,7032,7033,7034,7035,7036,7037,7038,4841,7039,7040,7041,7042,7043, # 6704 -7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058,7059, # 6720 
-7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,2985,7071,7072,7073,7074, # 6736 -7075,7076,7077,7078,7079,7080,4842,7081,7082,7083,7084,7085,7086,7087,7088,7089, # 6752 -7090,7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105, # 6768 -7106,7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,4450,7119,7120, # 6784 -7121,7122,7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136, # 6800 -7137,7138,7139,7140,7141,7142,7143,4843,7144,7145,7146,7147,7148,7149,7150,7151, # 6816 -7152,7153,7154,7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167, # 6832 -7168,7169,7170,7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183, # 6848 -7184,7185,7186,7187,7188,4171,4172,7189,7190,7191,7192,7193,7194,7195,7196,7197, # 6864 -7198,7199,7200,7201,7202,7203,7204,7205,7206,7207,7208,7209,7210,7211,7212,7213, # 6880 -7214,7215,7216,7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229, # 6896 -7230,7231,7232,7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245, # 6912 -7246,7247,7248,7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261, # 6928 -7262,7263,7264,7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277, # 6944 -7278,7279,7280,7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293, # 6960 -7294,7295,7296,4844,7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308, # 6976 -7309,7310,7311,7312,7313,7314,7315,7316,4451,7317,7318,7319,7320,7321,7322,7323, # 6992 -7324,7325,7326,7327,7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339, # 7008 -7340,7341,7342,7343,7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,4173,7354, # 7024 -7355,4845,7356,7357,7358,7359,7360,7361,7362,7363,7364,7365,7366,7367,7368,7369, # 7040 -7370,7371,7372,7373,7374,7375,7376,7377,7378,7379,7380,7381,7382,7383,7384,7385, # 7056 -7386,7387,7388,4846,7389,7390,7391,7392,7393,7394,7395,7396,7397,7398,7399,7400, # 7072 -7401,7402,7403,7404,7405,3672,7406,7407,7408,7409,7410,7411,7412,7413,7414,7415, # 7088 -7416,7417,7418,7419,7420,7421,7422,7423,7424,7425,7426,7427,7428,7429,7430,7431, # 7104 -7432,7433,7434,7435,7436,7437,7438,7439,7440,7441,7442,7443,7444,7445,7446,7447, # 7120 -7448,7449,7450,7451,7452,7453,4452,7454,3200,7455,7456,7457,7458,7459,7460,7461, # 7136 -7462,7463,7464,7465,7466,7467,7468,7469,7470,7471,7472,7473,7474,4847,7475,7476, # 7152 -7477,3133,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487,7488,7489,7490,7491, # 7168 -7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,3347,7503,7504,7505,7506, # 7184 -7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,7520,7521,4848, # 7200 -7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535,7536,7537, # 7216 -7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,3801,4849,7550,7551, # 7232 -7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567, # 7248 -7568,7569,3035,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582, # 7264 -7583,7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598, # 7280 -7599,7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614, # 7296 -7615,7616,4850,7617,7618,3802,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628, # 7312 -7629,7630,7631,7632,4851,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643, # 7328 -7644,7645,7646,7647,7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659, # 7344 -7660,7661,7662,7663,7664,7665,7666,7667,7668,7669,7670,4453,7671,7672,7673,7674, # 
7360 -7675,7676,7677,7678,7679,7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690, # 7376 -7691,7692,7693,7694,7695,7696,7697,3443,7698,7699,7700,7701,7702,4454,7703,7704, # 7392 -7705,7706,7707,7708,7709,7710,7711,7712,7713,2472,7714,7715,7716,7717,7718,7719, # 7408 -7720,7721,7722,7723,7724,7725,7726,7727,7728,7729,7730,7731,3954,7732,7733,7734, # 7424 -7735,7736,7737,7738,7739,7740,7741,7742,7743,7744,7745,7746,7747,7748,7749,7750, # 7440 -3134,7751,7752,4852,7753,7754,7755,4853,7756,7757,7758,7759,7760,4174,7761,7762, # 7456 -7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,7777,7778, # 7472 -7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791,7792,7793,7794, # 7488 -7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,4854,7806,7807,7808,7809, # 7504 -7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824,7825, # 7520 -4855,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 7536 -7841,7842,7843,7844,7845,7846,7847,3955,7848,7849,7850,7851,7852,7853,7854,7855, # 7552 -7856,7857,7858,7859,7860,3444,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870, # 7568 -7871,7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886, # 7584 -7887,7888,7889,7890,7891,4175,7892,7893,7894,7895,7896,4856,4857,7897,7898,7899, # 7600 -7900,2598,7901,7902,7903,7904,7905,7906,7907,7908,4455,7909,7910,7911,7912,7913, # 7616 -7914,3201,7915,7916,7917,7918,7919,7920,7921,4858,7922,7923,7924,7925,7926,7927, # 7632 -7928,7929,7930,7931,7932,7933,7934,7935,7936,7937,7938,7939,7940,7941,7942,7943, # 7648 -7944,7945,7946,7947,7948,7949,7950,7951,7952,7953,7954,7955,7956,7957,7958,7959, # 7664 -7960,7961,7962,7963,7964,7965,7966,7967,7968,7969,7970,7971,7972,7973,7974,7975, # 7680 -7976,7977,7978,7979,7980,7981,4859,7982,7983,7984,7985,7986,7987,7988,7989,7990, # 7696 -7991,7992,7993,7994,7995,7996,4860,7997,7998,7999,8000,8001,8002,8003,8004,8005, # 7712 -8006,8007,8008,8009,8010,8011,8012,8013,8014,8015,8016,4176,8017,8018,8019,8020, # 7728 -8021,8022,8023,4861,8024,8025,8026,8027,8028,8029,8030,8031,8032,8033,8034,8035, # 7744 -8036,4862,4456,8037,8038,8039,8040,4863,8041,8042,8043,8044,8045,8046,8047,8048, # 7760 -8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063,8064, # 7776 -8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079,8080, # 7792 -8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095,8096, # 7808 -8097,8098,8099,4864,4177,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110, # 7824 -8111,8112,8113,8114,8115,8116,8117,8118,8119,8120,4178,8121,8122,8123,8124,8125, # 7840 -8126,8127,8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141, # 7856 -8142,8143,8144,8145,4865,4866,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155, # 7872 -8156,8157,8158,8159,8160,8161,8162,8163,8164,8165,4179,8166,8167,8168,8169,8170, # 7888 -8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181,4457,8182,8183,8184,8185, # 7904 -8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197,8198,8199,8200,8201, # 7920 -8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213,8214,8215,8216,8217, # 7936 -8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229,8230,8231,8232,8233, # 7952 -8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245,8246,8247,8248,8249, # 7968 -8250,8251,8252,8253,8254,8255,8256,3445,8257,8258,8259,8260,8261,8262,4458,8263, # 7984 
-8264,8265,8266,8267,8268,8269,8270,8271,8272,4459,8273,8274,8275,8276,3550,8277, # 8000 -8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,4460,8290,8291,8292, # 8016 -8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,4867, # 8032 -8308,8309,8310,8311,8312,3551,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322, # 8048 -8323,8324,8325,8326,4868,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337, # 8064 -8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353, # 8080 -8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,4869,4461,8364,8365,8366,8367, # 8096 -8368,8369,8370,4870,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382, # 8112 -8383,8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398, # 8128 -8399,8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,4871,8411,8412,8413, # 8144 -8414,8415,8416,8417,8418,8419,8420,8421,8422,4462,8423,8424,8425,8426,8427,8428, # 8160 -8429,8430,8431,8432,8433,2986,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443, # 8176 -8444,8445,8446,8447,8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459, # 8192 -8460,8461,8462,8463,8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475, # 8208 -8476,8477,8478,4180,8479,8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490, # 8224 -8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506, # 8240 -8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522, # 8256 -8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538, # 8272 -8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554, # 8288 -8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,4872,8565,8566,8567,8568,8569, # 8304 -8570,8571,8572,8573,4873,8574,8575,8576,8577,8578,8579,8580,8581,8582,8583,8584, # 8320 -8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597,8598,8599,8600, # 8336 -8601,8602,8603,8604,8605,3803,8606,8607,8608,8609,8610,8611,8612,8613,4874,3804, # 8352 -8614,8615,8616,8617,8618,8619,8620,8621,3956,8622,8623,8624,8625,8626,8627,8628, # 8368 -8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,2865,8639,8640,8641,8642,8643, # 8384 -8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,4463,8657,8658, # 8400 -8659,4875,4876,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672, # 8416 -8673,8674,8675,8676,8677,8678,8679,8680,8681,4464,8682,8683,8684,8685,8686,8687, # 8432 -8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703, # 8448 -8704,8705,8706,8707,8708,8709,2261,8710,8711,8712,8713,8714,8715,8716,8717,8718, # 8464 -8719,8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,4181, # 8480 -8734,8735,8736,8737,8738,8739,8740,8741,8742,8743,8744,8745,8746,8747,8748,8749, # 8496 -8750,8751,8752,8753,8754,8755,8756,8757,8758,8759,8760,8761,8762,8763,4877,8764, # 8512 -8765,8766,8767,8768,8769,8770,8771,8772,8773,8774,8775,8776,8777,8778,8779,8780, # 8528 -8781,8782,8783,8784,8785,8786,8787,8788,4878,8789,4879,8790,8791,8792,4880,8793, # 8544 -8794,8795,8796,8797,8798,8799,8800,8801,4881,8802,8803,8804,8805,8806,8807,8808, # 8560 -8809,8810,8811,8812,8813,8814,8815,3957,8816,8817,8818,8819,8820,8821,8822,8823, # 8576 -8824,8825,8826,8827,8828,8829,8830,8831,8832,8833,8834,8835,8836,8837,8838,8839, # 8592 -8840,8841,8842,8843,8844,8845,8846,8847,4882,8848,8849,8850,8851,8852,8853,8854, # 8608 -8855,8856,8857,8858,8859,8860,8861,8862,8863,8864,8865,8866,8867,8868,8869,8870, # 
8624 -8871,8872,8873,8874,8875,8876,8877,8878,8879,8880,8881,8882,8883,8884,3202,8885, # 8640 -8886,8887,8888,8889,8890,8891,8892,8893,8894,8895,8896,8897,8898,8899,8900,8901, # 8656 -8902,8903,8904,8905,8906,8907,8908,8909,8910,8911,8912,8913,8914,8915,8916,8917, # 8672 -8918,8919,8920,8921,8922,8923,8924,4465,8925,8926,8927,8928,8929,8930,8931,8932, # 8688 -4883,8933,8934,8935,8936,8937,8938,8939,8940,8941,8942,8943,2214,8944,8945,8946, # 8704 -8947,8948,8949,8950,8951,8952,8953,8954,8955,8956,8957,8958,8959,8960,8961,8962, # 8720 -8963,8964,8965,4884,8966,8967,8968,8969,8970,8971,8972,8973,8974,8975,8976,8977, # 8736 -8978,8979,8980,8981,8982,8983,8984,8985,8986,8987,8988,8989,8990,8991,8992,4885, # 8752 -8993,8994,8995,8996,8997,8998,8999,9000,9001,9002,9003,9004,9005,9006,9007,9008, # 8768 -9009,9010,9011,9012,9013,9014,9015,9016,9017,9018,9019,9020,9021,4182,9022,9023, # 8784 -9024,9025,9026,9027,9028,9029,9030,9031,9032,9033,9034,9035,9036,9037,9038,9039, # 8800 -9040,9041,9042,9043,9044,9045,9046,9047,9048,9049,9050,9051,9052,9053,9054,9055, # 8816 -9056,9057,9058,9059,9060,9061,9062,9063,4886,9064,9065,9066,9067,9068,9069,4887, # 8832 -9070,9071,9072,9073,9074,9075,9076,9077,9078,9079,9080,9081,9082,9083,9084,9085, # 8848 -9086,9087,9088,9089,9090,9091,9092,9093,9094,9095,9096,9097,9098,9099,9100,9101, # 8864 -9102,9103,9104,9105,9106,9107,9108,9109,9110,9111,9112,9113,9114,9115,9116,9117, # 8880 -9118,9119,9120,9121,9122,9123,9124,9125,9126,9127,9128,9129,9130,9131,9132,9133, # 8896 -9134,9135,9136,9137,9138,9139,9140,9141,3958,9142,9143,9144,9145,9146,9147,9148, # 8912 -9149,9150,9151,4888,9152,9153,9154,9155,9156,9157,9158,9159,9160,9161,9162,9163, # 8928 -9164,9165,9166,9167,9168,9169,9170,9171,9172,9173,9174,9175,4889,9176,9177,9178, # 8944 -9179,9180,9181,9182,9183,9184,9185,9186,9187,9188,9189,9190,9191,9192,9193,9194, # 8960 -9195,9196,9197,9198,9199,9200,9201,9202,9203,4890,9204,9205,9206,9207,9208,9209, # 8976 -9210,9211,9212,9213,9214,9215,9216,9217,9218,9219,9220,9221,9222,4466,9223,9224, # 8992 -9225,9226,9227,9228,9229,9230,9231,9232,9233,9234,9235,9236,9237,9238,9239,9240, # 9008 -9241,9242,9243,9244,9245,4891,9246,9247,9248,9249,9250,9251,9252,9253,9254,9255, # 9024 -9256,9257,4892,9258,9259,9260,9261,4893,4894,9262,9263,9264,9265,9266,9267,9268, # 9040 -9269,9270,9271,9272,9273,4467,9274,9275,9276,9277,9278,9279,9280,9281,9282,9283, # 9056 -9284,9285,3673,9286,9287,9288,9289,9290,9291,9292,9293,9294,9295,9296,9297,9298, # 9072 -9299,9300,9301,9302,9303,9304,9305,9306,9307,9308,9309,9310,9311,9312,9313,9314, # 9088 -9315,9316,9317,9318,9319,9320,9321,9322,4895,9323,9324,9325,9326,9327,9328,9329, # 9104 -9330,9331,9332,9333,9334,9335,9336,9337,9338,9339,9340,9341,9342,9343,9344,9345, # 9120 -9346,9347,4468,9348,9349,9350,9351,9352,9353,9354,9355,9356,9357,9358,9359,9360, # 9136 -9361,9362,9363,9364,9365,9366,9367,9368,9369,9370,9371,9372,9373,4896,9374,4469, # 9152 -9375,9376,9377,9378,9379,4897,9380,9381,9382,9383,9384,9385,9386,9387,9388,9389, # 9168 -9390,9391,9392,9393,9394,9395,9396,9397,9398,9399,9400,9401,9402,9403,9404,9405, # 9184 -9406,4470,9407,2751,9408,9409,3674,3552,9410,9411,9412,9413,9414,9415,9416,9417, # 9200 -9418,9419,9420,9421,4898,9422,9423,9424,9425,9426,9427,9428,9429,3959,9430,9431, # 9216 -9432,9433,9434,9435,9436,4471,9437,9438,9439,9440,9441,9442,9443,9444,9445,9446, # 9232 -9447,9448,9449,9450,3348,9451,9452,9453,9454,9455,9456,9457,9458,9459,9460,9461, # 9248 
-9462,9463,9464,9465,9466,9467,9468,9469,9470,9471,9472,4899,9473,9474,9475,9476, # 9264 -9477,4900,9478,9479,9480,9481,9482,9483,9484,9485,9486,9487,9488,3349,9489,9490, # 9280 -9491,9492,9493,9494,9495,9496,9497,9498,9499,9500,9501,9502,9503,9504,9505,9506, # 9296 -9507,9508,9509,9510,9511,9512,9513,9514,9515,9516,9517,9518,9519,9520,4901,9521, # 9312 -9522,9523,9524,9525,9526,4902,9527,9528,9529,9530,9531,9532,9533,9534,9535,9536, # 9328 -9537,9538,9539,9540,9541,9542,9543,9544,9545,9546,9547,9548,9549,9550,9551,9552, # 9344 -9553,9554,9555,9556,9557,9558,9559,9560,9561,9562,9563,9564,9565,9566,9567,9568, # 9360 -9569,9570,9571,9572,9573,9574,9575,9576,9577,9578,9579,9580,9581,9582,9583,9584, # 9376 -3805,9585,9586,9587,9588,9589,9590,9591,9592,9593,9594,9595,9596,9597,9598,9599, # 9392 -9600,9601,9602,4903,9603,9604,9605,9606,9607,4904,9608,9609,9610,9611,9612,9613, # 9408 -9614,4905,9615,9616,9617,9618,9619,9620,9621,9622,9623,9624,9625,9626,9627,9628, # 9424 -9629,9630,9631,9632,4906,9633,9634,9635,9636,9637,9638,9639,9640,9641,9642,9643, # 9440 -4907,9644,9645,9646,9647,9648,9649,9650,9651,9652,9653,9654,9655,9656,9657,9658, # 9456 -9659,9660,9661,9662,9663,9664,9665,9666,9667,9668,9669,9670,9671,9672,4183,9673, # 9472 -9674,9675,9676,9677,4908,9678,9679,9680,9681,4909,9682,9683,9684,9685,9686,9687, # 9488 -9688,9689,9690,4910,9691,9692,9693,3675,9694,9695,9696,2945,9697,9698,9699,9700, # 9504 -9701,9702,9703,9704,9705,4911,9706,9707,9708,9709,9710,9711,9712,9713,9714,9715, # 9520 -9716,9717,9718,9719,9720,9721,9722,9723,9724,9725,9726,9727,9728,9729,9730,9731, # 9536 -9732,9733,9734,9735,4912,9736,9737,9738,9739,9740,4913,9741,9742,9743,9744,9745, # 9552 -9746,9747,9748,9749,9750,9751,9752,9753,9754,9755,9756,9757,9758,4914,9759,9760, # 9568 -9761,9762,9763,9764,9765,9766,9767,9768,9769,9770,9771,9772,9773,9774,9775,9776, # 9584 -9777,9778,9779,9780,9781,9782,4915,9783,9784,9785,9786,9787,9788,9789,9790,9791, # 9600 -9792,9793,4916,9794,9795,9796,9797,9798,9799,9800,9801,9802,9803,9804,9805,9806, # 9616 -9807,9808,9809,9810,9811,9812,9813,9814,9815,9816,9817,9818,9819,9820,9821,9822, # 9632 -9823,9824,9825,9826,9827,9828,9829,9830,9831,9832,9833,9834,9835,9836,9837,9838, # 9648 -9839,9840,9841,9842,9843,9844,9845,9846,9847,9848,9849,9850,9851,9852,9853,9854, # 9664 -9855,9856,9857,9858,9859,9860,9861,9862,9863,9864,9865,9866,9867,9868,4917,9869, # 9680 -9870,9871,9872,9873,9874,9875,9876,9877,9878,9879,9880,9881,9882,9883,9884,9885, # 9696 -9886,9887,9888,9889,9890,9891,9892,4472,9893,9894,9895,9896,9897,3806,9898,9899, # 9712 -9900,9901,9902,9903,9904,9905,9906,9907,9908,9909,9910,9911,9912,9913,9914,4918, # 9728 -9915,9916,9917,4919,9918,9919,9920,9921,4184,9922,9923,9924,9925,9926,9927,9928, # 9744 -9929,9930,9931,9932,9933,9934,9935,9936,9937,9938,9939,9940,9941,9942,9943,9944, # 9760 -9945,9946,4920,9947,9948,9949,9950,9951,9952,9953,9954,9955,4185,9956,9957,9958, # 9776 -9959,9960,9961,9962,9963,9964,9965,4921,9966,9967,9968,4473,9969,9970,9971,9972, # 9792 -9973,9974,9975,9976,9977,4474,9978,9979,9980,9981,9982,9983,9984,9985,9986,9987, # 9808 -9988,9989,9990,9991,9992,9993,9994,9995,9996,9997,9998,9999,10000,10001,10002,10003, # 9824 -10004,10005,10006,10007,10008,10009,10010,10011,10012,10013,10014,10015,10016,10017,10018,10019, # 9840 -10020,10021,4922,10022,4923,10023,10024,10025,10026,10027,10028,10029,10030,10031,10032,10033, # 9856 -10034,10035,10036,10037,10038,10039,10040,10041,10042,10043,10044,10045,10046,10047,10048,4924, # 9872 
-10049,10050,10051,10052,10053,10054,10055,10056,10057,10058,10059,10060,10061,10062,10063,10064, # 9888 -10065,10066,10067,10068,10069,10070,10071,10072,10073,10074,10075,10076,10077,10078,10079,10080, # 9904 -10081,10082,10083,10084,10085,10086,10087,4475,10088,10089,10090,10091,10092,10093,10094,10095, # 9920 -10096,10097,4476,10098,10099,10100,10101,10102,10103,10104,10105,10106,10107,10108,10109,10110, # 9936 -10111,2174,10112,10113,10114,10115,10116,10117,10118,10119,10120,10121,10122,10123,10124,10125, # 9952 -10126,10127,10128,10129,10130,10131,10132,10133,10134,10135,10136,10137,10138,10139,10140,3807, # 9968 -4186,4925,10141,10142,10143,10144,10145,10146,10147,4477,4187,10148,10149,10150,10151,10152, # 9984 -10153,4188,10154,10155,10156,10157,10158,10159,10160,10161,4926,10162,10163,10164,10165,10166, #10000 -10167,10168,10169,10170,10171,10172,10173,10174,10175,10176,10177,10178,10179,10180,10181,10182, #10016 -10183,10184,10185,10186,10187,10188,10189,10190,10191,10192,3203,10193,10194,10195,10196,10197, #10032 -10198,10199,10200,4478,10201,10202,10203,10204,4479,10205,10206,10207,10208,10209,10210,10211, #10048 -10212,10213,10214,10215,10216,10217,10218,10219,10220,10221,10222,10223,10224,10225,10226,10227, #10064 -10228,10229,10230,10231,10232,10233,10234,4927,10235,10236,10237,10238,10239,10240,10241,10242, #10080 -10243,10244,10245,10246,10247,10248,10249,10250,10251,10252,10253,10254,10255,10256,10257,10258, #10096 -10259,10260,10261,10262,10263,10264,10265,10266,10267,10268,10269,10270,10271,10272,10273,4480, #10112 -4928,4929,10274,10275,10276,10277,10278,10279,10280,10281,10282,10283,10284,10285,10286,10287, #10128 -10288,10289,10290,10291,10292,10293,10294,10295,10296,10297,10298,10299,10300,10301,10302,10303, #10144 -10304,10305,10306,10307,10308,10309,10310,10311,10312,10313,10314,10315,10316,10317,10318,10319, #10160 -10320,10321,10322,10323,10324,10325,10326,10327,10328,10329,10330,10331,10332,10333,10334,4930, #10176 -10335,10336,10337,10338,10339,10340,10341,10342,4931,10343,10344,10345,10346,10347,10348,10349, #10192 -10350,10351,10352,10353,10354,10355,3088,10356,2786,10357,10358,10359,10360,4189,10361,10362, #10208 -10363,10364,10365,10366,10367,10368,10369,10370,10371,10372,10373,10374,10375,4932,10376,10377, #10224 -10378,10379,10380,10381,10382,10383,10384,10385,10386,10387,10388,10389,10390,10391,10392,4933, #10240 -10393,10394,10395,4934,10396,10397,10398,10399,10400,10401,10402,10403,10404,10405,10406,10407, #10256 -10408,10409,10410,10411,10412,3446,10413,10414,10415,10416,10417,10418,10419,10420,10421,10422, #10272 -10423,4935,10424,10425,10426,10427,10428,10429,10430,4936,10431,10432,10433,10434,10435,10436, #10288 -10437,10438,10439,10440,10441,10442,10443,4937,10444,10445,10446,10447,4481,10448,10449,10450, #10304 -10451,10452,10453,10454,10455,10456,10457,10458,10459,10460,10461,10462,10463,10464,10465,10466, #10320 -10467,10468,10469,10470,10471,10472,10473,10474,10475,10476,10477,10478,10479,10480,10481,10482, #10336 -10483,10484,10485,10486,10487,10488,10489,10490,10491,10492,10493,10494,10495,10496,10497,10498, #10352 -10499,10500,10501,10502,10503,10504,10505,4938,10506,10507,10508,10509,10510,2552,10511,10512, #10368 -10513,10514,10515,10516,3447,10517,10518,10519,10520,10521,10522,10523,10524,10525,10526,10527, #10384 -10528,10529,10530,10531,10532,10533,10534,10535,10536,10537,10538,10539,10540,10541,10542,10543, #10400 -4482,10544,4939,10545,10546,10547,10548,10549,10550,10551,10552,10553,10554,10555,10556,10557, #10416 
-10558,10559,10560,10561,10562,10563,10564,10565,10566,10567,3676,4483,10568,10569,10570,10571, #10432 -10572,3448,10573,10574,10575,10576,10577,10578,10579,10580,10581,10582,10583,10584,10585,10586, #10448 -10587,10588,10589,10590,10591,10592,10593,10594,10595,10596,10597,10598,10599,10600,10601,10602, #10464 -10603,10604,10605,10606,10607,10608,10609,10610,10611,10612,10613,10614,10615,10616,10617,10618, #10480 -10619,10620,10621,10622,10623,10624,10625,10626,10627,4484,10628,10629,10630,10631,10632,4940, #10496 -10633,10634,10635,10636,10637,10638,10639,10640,10641,10642,10643,10644,10645,10646,10647,10648, #10512 -10649,10650,10651,10652,10653,10654,10655,10656,4941,10657,10658,10659,2599,10660,10661,10662, #10528 -10663,10664,10665,10666,3089,10667,10668,10669,10670,10671,10672,10673,10674,10675,10676,10677, #10544 -10678,10679,10680,4942,10681,10682,10683,10684,10685,10686,10687,10688,10689,10690,10691,10692, #10560 -10693,10694,10695,10696,10697,4485,10698,10699,10700,10701,10702,10703,10704,4943,10705,3677, #10576 -10706,10707,10708,10709,10710,10711,10712,4944,10713,10714,10715,10716,10717,10718,10719,10720, #10592 -10721,10722,10723,10724,10725,10726,10727,10728,4945,10729,10730,10731,10732,10733,10734,10735, #10608 -10736,10737,10738,10739,10740,10741,10742,10743,10744,10745,10746,10747,10748,10749,10750,10751, #10624 -10752,10753,10754,10755,10756,10757,10758,10759,10760,10761,4946,10762,10763,10764,10765,10766, #10640 -10767,4947,4948,10768,10769,10770,10771,10772,10773,10774,10775,10776,10777,10778,10779,10780, #10656 -10781,10782,10783,10784,10785,10786,10787,10788,10789,10790,10791,10792,10793,10794,10795,10796, #10672 -10797,10798,10799,10800,10801,10802,10803,10804,10805,10806,10807,10808,10809,10810,10811,10812, #10688 -10813,10814,10815,10816,10817,10818,10819,10820,10821,10822,10823,10824,10825,10826,10827,10828, #10704 -10829,10830,10831,10832,10833,10834,10835,10836,10837,10838,10839,10840,10841,10842,10843,10844, #10720 -10845,10846,10847,10848,10849,10850,10851,10852,10853,10854,10855,10856,10857,10858,10859,10860, #10736 -10861,10862,10863,10864,10865,10866,10867,10868,10869,10870,10871,10872,10873,10874,10875,10876, #10752 -10877,10878,4486,10879,10880,10881,10882,10883,10884,10885,4949,10886,10887,10888,10889,10890, #10768 -10891,10892,10893,10894,10895,10896,10897,10898,10899,10900,10901,10902,10903,10904,10905,10906, #10784 -10907,10908,10909,10910,10911,10912,10913,10914,10915,10916,10917,10918,10919,4487,10920,10921, #10800 -10922,10923,10924,10925,10926,10927,10928,10929,10930,10931,10932,4950,10933,10934,10935,10936, #10816 -10937,10938,10939,10940,10941,10942,10943,10944,10945,10946,10947,10948,10949,4488,10950,10951, #10832 -10952,10953,10954,10955,10956,10957,10958,10959,4190,10960,10961,10962,10963,10964,10965,10966, #10848 -10967,10968,10969,10970,10971,10972,10973,10974,10975,10976,10977,10978,10979,10980,10981,10982, #10864 -10983,10984,10985,10986,10987,10988,10989,10990,10991,10992,10993,10994,10995,10996,10997,10998, #10880 -10999,11000,11001,11002,11003,11004,11005,11006,3960,11007,11008,11009,11010,11011,11012,11013, #10896 -11014,11015,11016,11017,11018,11019,11020,11021,11022,11023,11024,11025,11026,11027,11028,11029, #10912 -11030,11031,11032,4951,11033,11034,11035,11036,11037,11038,11039,11040,11041,11042,11043,11044, #10928 -11045,11046,11047,4489,11048,11049,11050,11051,4952,11052,11053,11054,11055,11056,11057,11058, #10944 -4953,11059,11060,11061,11062,11063,11064,11065,11066,11067,11068,11069,11070,11071,4954,11072, #10960 
-11073,11074,11075,11076,11077,11078,11079,11080,11081,11082,11083,11084,11085,11086,11087,11088, #10976 -11089,11090,11091,11092,11093,11094,11095,11096,11097,11098,11099,11100,11101,11102,11103,11104, #10992 -11105,11106,11107,11108,11109,11110,11111,11112,11113,11114,11115,3808,11116,11117,11118,11119, #11008 -11120,11121,11122,11123,11124,11125,11126,11127,11128,11129,11130,11131,11132,11133,11134,4955, #11024 -11135,11136,11137,11138,11139,11140,11141,11142,11143,11144,11145,11146,11147,11148,11149,11150, #11040 -11151,11152,11153,11154,11155,11156,11157,11158,11159,11160,11161,4956,11162,11163,11164,11165, #11056 -11166,11167,11168,11169,11170,11171,11172,11173,11174,11175,11176,11177,11178,11179,11180,4957, #11072 -11181,11182,11183,11184,11185,11186,4958,11187,11188,11189,11190,11191,11192,11193,11194,11195, #11088 -11196,11197,11198,11199,11200,3678,11201,11202,11203,11204,11205,11206,4191,11207,11208,11209, #11104 -11210,11211,11212,11213,11214,11215,11216,11217,11218,11219,11220,11221,11222,11223,11224,11225, #11120 -11226,11227,11228,11229,11230,11231,11232,11233,11234,11235,11236,11237,11238,11239,11240,11241, #11136 -11242,11243,11244,11245,11246,11247,11248,11249,11250,11251,4959,11252,11253,11254,11255,11256, #11152 -11257,11258,11259,11260,11261,11262,11263,11264,11265,11266,11267,11268,11269,11270,11271,11272, #11168 -11273,11274,11275,11276,11277,11278,11279,11280,11281,11282,11283,11284,11285,11286,11287,11288, #11184 -11289,11290,11291,11292,11293,11294,11295,11296,11297,11298,11299,11300,11301,11302,11303,11304, #11200 -11305,11306,11307,11308,11309,11310,11311,11312,11313,11314,3679,11315,11316,11317,11318,4490, #11216 -11319,11320,11321,11322,11323,11324,11325,11326,11327,11328,11329,11330,11331,11332,11333,11334, #11232 -11335,11336,11337,11338,11339,11340,11341,11342,11343,11344,11345,11346,11347,4960,11348,11349, #11248 -11350,11351,11352,11353,11354,11355,11356,11357,11358,11359,11360,11361,11362,11363,11364,11365, #11264 -11366,11367,11368,11369,11370,11371,11372,11373,11374,11375,11376,11377,3961,4961,11378,11379, #11280 -11380,11381,11382,11383,11384,11385,11386,11387,11388,11389,11390,11391,11392,11393,11394,11395, #11296 -11396,11397,4192,11398,11399,11400,11401,11402,11403,11404,11405,11406,11407,11408,11409,11410, #11312 -11411,4962,11412,11413,11414,11415,11416,11417,11418,11419,11420,11421,11422,11423,11424,11425, #11328 -11426,11427,11428,11429,11430,11431,11432,11433,11434,11435,11436,11437,11438,11439,11440,11441, #11344 -11442,11443,11444,11445,11446,11447,11448,11449,11450,11451,11452,11453,11454,11455,11456,11457, #11360 -11458,11459,11460,11461,11462,11463,11464,11465,11466,11467,11468,11469,4963,11470,11471,4491, #11376 -11472,11473,11474,11475,4964,11476,11477,11478,11479,11480,11481,11482,11483,11484,11485,11486, #11392 -11487,11488,11489,11490,11491,11492,4965,11493,11494,11495,11496,11497,11498,11499,11500,11501, #11408 -11502,11503,11504,11505,11506,11507,11508,11509,11510,11511,11512,11513,11514,11515,11516,11517, #11424 -11518,11519,11520,11521,11522,11523,11524,11525,11526,11527,11528,11529,3962,11530,11531,11532, #11440 -11533,11534,11535,11536,11537,11538,11539,11540,11541,11542,11543,11544,11545,11546,11547,11548, #11456 -11549,11550,11551,11552,11553,11554,11555,11556,11557,11558,11559,11560,11561,11562,11563,11564, #11472 -4193,4194,11565,11566,11567,11568,11569,11570,11571,11572,11573,11574,11575,11576,11577,11578, #11488 -11579,11580,11581,11582,11583,11584,11585,11586,11587,11588,11589,11590,11591,4966,4195,11592, #11504 
-11593,11594,11595,11596,11597,11598,11599,11600,11601,11602,11603,11604,3090,11605,11606,11607, #11520 -11608,11609,11610,4967,11611,11612,11613,11614,11615,11616,11617,11618,11619,11620,11621,11622, #11536 -11623,11624,11625,11626,11627,11628,11629,11630,11631,11632,11633,11634,11635,11636,11637,11638, #11552 -11639,11640,11641,11642,11643,11644,11645,11646,11647,11648,11649,11650,11651,11652,11653,11654, #11568 -11655,11656,11657,11658,11659,11660,11661,11662,11663,11664,11665,11666,11667,11668,11669,11670, #11584 -11671,11672,11673,11674,4968,11675,11676,11677,11678,11679,11680,11681,11682,11683,11684,11685, #11600 -11686,11687,11688,11689,11690,11691,11692,11693,3809,11694,11695,11696,11697,11698,11699,11700, #11616 -11701,11702,11703,11704,11705,11706,11707,11708,11709,11710,11711,11712,11713,11714,11715,11716, #11632 -11717,11718,3553,11719,11720,11721,11722,11723,11724,11725,11726,11727,11728,11729,11730,4969, #11648 -11731,11732,11733,11734,11735,11736,11737,11738,11739,11740,4492,11741,11742,11743,11744,11745, #11664 -11746,11747,11748,11749,11750,11751,11752,4970,11753,11754,11755,11756,11757,11758,11759,11760, #11680 -11761,11762,11763,11764,11765,11766,11767,11768,11769,11770,11771,11772,11773,11774,11775,11776, #11696 -11777,11778,11779,11780,11781,11782,11783,11784,11785,11786,11787,11788,11789,11790,4971,11791, #11712 -11792,11793,11794,11795,11796,11797,4972,11798,11799,11800,11801,11802,11803,11804,11805,11806, #11728 -11807,11808,11809,11810,4973,11811,11812,11813,11814,11815,11816,11817,11818,11819,11820,11821, #11744 -11822,11823,11824,11825,11826,11827,11828,11829,11830,11831,11832,11833,11834,3680,3810,11835, #11760 -11836,4974,11837,11838,11839,11840,11841,11842,11843,11844,11845,11846,11847,11848,11849,11850, #11776 -11851,11852,11853,11854,11855,11856,11857,11858,11859,11860,11861,11862,11863,11864,11865,11866, #11792 -11867,11868,11869,11870,11871,11872,11873,11874,11875,11876,11877,11878,11879,11880,11881,11882, #11808 -11883,11884,4493,11885,11886,11887,11888,11889,11890,11891,11892,11893,11894,11895,11896,11897, #11824 -11898,11899,11900,11901,11902,11903,11904,11905,11906,11907,11908,11909,11910,11911,11912,11913, #11840 -11914,11915,4975,11916,11917,11918,11919,11920,11921,11922,11923,11924,11925,11926,11927,11928, #11856 -11929,11930,11931,11932,11933,11934,11935,11936,11937,11938,11939,11940,11941,11942,11943,11944, #11872 -11945,11946,11947,11948,11949,4976,11950,11951,11952,11953,11954,11955,11956,11957,11958,11959, #11888 -11960,11961,11962,11963,11964,11965,11966,11967,11968,11969,11970,11971,11972,11973,11974,11975, #11904 -11976,11977,11978,11979,11980,11981,11982,11983,11984,11985,11986,11987,4196,11988,11989,11990, #11920 -11991,11992,4977,11993,11994,11995,11996,11997,11998,11999,12000,12001,12002,12003,12004,12005, #11936 -12006,12007,12008,12009,12010,12011,12012,12013,12014,12015,12016,12017,12018,12019,12020,12021, #11952 -12022,12023,12024,12025,12026,12027,12028,12029,12030,12031,12032,12033,12034,12035,12036,12037, #11968 -12038,12039,12040,12041,12042,12043,12044,12045,12046,12047,12048,12049,12050,12051,12052,12053, #11984 -12054,12055,12056,12057,12058,12059,12060,12061,4978,12062,12063,12064,12065,12066,12067,12068, #12000 -12069,12070,12071,12072,12073,12074,12075,12076,12077,12078,12079,12080,12081,12082,12083,12084, #12016 -12085,12086,12087,12088,12089,12090,12091,12092,12093,12094,12095,12096,12097,12098,12099,12100, #12032 -12101,12102,12103,12104,12105,12106,12107,12108,12109,12110,12111,12112,12113,12114,12115,12116, #12048 
-12117,12118,12119,12120,12121,12122,12123,4979,12124,12125,12126,12127,12128,4197,12129,12130, #12064 -12131,12132,12133,12134,12135,12136,12137,12138,12139,12140,12141,12142,12143,12144,12145,12146, #12080 -12147,12148,12149,12150,12151,12152,12153,12154,4980,12155,12156,12157,12158,12159,12160,4494, #12096 -12161,12162,12163,12164,3811,12165,12166,12167,12168,12169,4495,12170,12171,4496,12172,12173, #12112 -12174,12175,12176,3812,12177,12178,12179,12180,12181,12182,12183,12184,12185,12186,12187,12188, #12128 -12189,12190,12191,12192,12193,12194,12195,12196,12197,12198,12199,12200,12201,12202,12203,12204, #12144 -12205,12206,12207,12208,12209,12210,12211,12212,12213,12214,12215,12216,12217,12218,12219,12220, #12160 -12221,4981,12222,12223,12224,12225,12226,12227,12228,12229,12230,12231,12232,12233,12234,12235, #12176 -4982,12236,12237,12238,12239,12240,12241,12242,12243,12244,12245,4983,12246,12247,12248,12249, #12192 -4984,12250,12251,12252,12253,12254,12255,12256,12257,12258,12259,12260,12261,12262,12263,12264, #12208 -4985,12265,4497,12266,12267,12268,12269,12270,12271,12272,12273,12274,12275,12276,12277,12278, #12224 -12279,12280,12281,12282,12283,12284,12285,12286,12287,4986,12288,12289,12290,12291,12292,12293, #12240 -12294,12295,12296,2473,12297,12298,12299,12300,12301,12302,12303,12304,12305,12306,12307,12308, #12256 -12309,12310,12311,12312,12313,12314,12315,12316,12317,12318,12319,3963,12320,12321,12322,12323, #12272 -12324,12325,12326,12327,12328,12329,12330,12331,12332,4987,12333,12334,12335,12336,12337,12338, #12288 -12339,12340,12341,12342,12343,12344,12345,12346,12347,12348,12349,12350,12351,12352,12353,12354, #12304 -12355,12356,12357,12358,12359,3964,12360,12361,12362,12363,12364,12365,12366,12367,12368,12369, #12320 -12370,3965,12371,12372,12373,12374,12375,12376,12377,12378,12379,12380,12381,12382,12383,12384, #12336 -12385,12386,12387,12388,12389,12390,12391,12392,12393,12394,12395,12396,12397,12398,12399,12400, #12352 -12401,12402,12403,12404,12405,12406,12407,12408,4988,12409,12410,12411,12412,12413,12414,12415, #12368 -12416,12417,12418,12419,12420,12421,12422,12423,12424,12425,12426,12427,12428,12429,12430,12431, #12384 -12432,12433,12434,12435,12436,12437,12438,3554,12439,12440,12441,12442,12443,12444,12445,12446, #12400 -12447,12448,12449,12450,12451,12452,12453,12454,12455,12456,12457,12458,12459,12460,12461,12462, #12416 -12463,12464,4989,12465,12466,12467,12468,12469,12470,12471,12472,12473,12474,12475,12476,12477, #12432 -12478,12479,12480,4990,12481,12482,12483,12484,12485,12486,12487,12488,12489,4498,12490,12491, #12448 -12492,12493,12494,12495,12496,12497,12498,12499,12500,12501,12502,12503,12504,12505,12506,12507, #12464 -12508,12509,12510,12511,12512,12513,12514,12515,12516,12517,12518,12519,12520,12521,12522,12523, #12480 -12524,12525,12526,12527,12528,12529,12530,12531,12532,12533,12534,12535,12536,12537,12538,12539, #12496 -12540,12541,12542,12543,12544,12545,12546,12547,12548,12549,12550,12551,4991,12552,12553,12554, #12512 -12555,12556,12557,12558,12559,12560,12561,12562,12563,12564,12565,12566,12567,12568,12569,12570, #12528 -12571,12572,12573,12574,12575,12576,12577,12578,3036,12579,12580,12581,12582,12583,3966,12584, #12544 -12585,12586,12587,12588,12589,12590,12591,12592,12593,12594,12595,12596,12597,12598,12599,12600, #12560 -12601,12602,12603,12604,12605,12606,12607,12608,12609,12610,12611,12612,12613,12614,12615,12616, #12576 -12617,12618,12619,12620,12621,12622,12623,12624,12625,12626,12627,12628,12629,12630,12631,12632, #12592 
-12633,12634,12635,12636,12637,12638,12639,12640,12641,12642,12643,12644,12645,12646,4499,12647, #12608 -12648,12649,12650,12651,12652,12653,12654,12655,12656,12657,12658,12659,12660,12661,12662,12663, #12624 -12664,12665,12666,12667,12668,12669,12670,12671,12672,12673,12674,12675,12676,12677,12678,12679, #12640 -12680,12681,12682,12683,12684,12685,12686,12687,12688,12689,12690,12691,12692,12693,12694,12695, #12656 -12696,12697,12698,4992,12699,12700,12701,12702,12703,12704,12705,12706,12707,12708,12709,12710, #12672 -12711,12712,12713,12714,12715,12716,12717,12718,12719,12720,12721,12722,12723,12724,12725,12726, #12688 -12727,12728,12729,12730,12731,12732,12733,12734,12735,12736,12737,12738,12739,12740,12741,12742, #12704 -12743,12744,12745,12746,12747,12748,12749,12750,12751,12752,12753,12754,12755,12756,12757,12758, #12720 -12759,12760,12761,12762,12763,12764,12765,12766,12767,12768,12769,12770,12771,12772,12773,12774, #12736 -12775,12776,12777,12778,4993,2175,12779,12780,12781,12782,12783,12784,12785,12786,4500,12787, #12752 -12788,12789,12790,12791,12792,12793,12794,12795,12796,12797,12798,12799,12800,12801,12802,12803, #12768 -12804,12805,12806,12807,12808,12809,12810,12811,12812,12813,12814,12815,12816,12817,12818,12819, #12784 -12820,12821,12822,12823,12824,12825,12826,4198,3967,12827,12828,12829,12830,12831,12832,12833, #12800 -12834,12835,12836,12837,12838,12839,12840,12841,12842,12843,12844,12845,12846,12847,12848,12849, #12816 -12850,12851,12852,12853,12854,12855,12856,12857,12858,12859,12860,12861,4199,12862,12863,12864, #12832 -12865,12866,12867,12868,12869,12870,12871,12872,12873,12874,12875,12876,12877,12878,12879,12880, #12848 -12881,12882,12883,12884,12885,12886,12887,4501,12888,12889,12890,12891,12892,12893,12894,12895, #12864 -12896,12897,12898,12899,12900,12901,12902,12903,12904,12905,12906,12907,12908,12909,12910,12911, #12880 -12912,4994,12913,12914,12915,12916,12917,12918,12919,12920,12921,12922,12923,12924,12925,12926, #12896 -12927,12928,12929,12930,12931,12932,12933,12934,12935,12936,12937,12938,12939,12940,12941,12942, #12912 -12943,12944,12945,12946,12947,12948,12949,12950,12951,12952,12953,12954,12955,12956,1772,12957, #12928 -12958,12959,12960,12961,12962,12963,12964,12965,12966,12967,12968,12969,12970,12971,12972,12973, #12944 -12974,12975,12976,12977,12978,12979,12980,12981,12982,12983,12984,12985,12986,12987,12988,12989, #12960 -12990,12991,12992,12993,12994,12995,12996,12997,4502,12998,4503,12999,13000,13001,13002,13003, #12976 -4504,13004,13005,13006,13007,13008,13009,13010,13011,13012,13013,13014,13015,13016,13017,13018, #12992 -13019,13020,13021,13022,13023,13024,13025,13026,13027,13028,13029,3449,13030,13031,13032,13033, #13008 -13034,13035,13036,13037,13038,13039,13040,13041,13042,13043,13044,13045,13046,13047,13048,13049, #13024 -13050,13051,13052,13053,13054,13055,13056,13057,13058,13059,13060,13061,13062,13063,13064,13065, #13040 -13066,13067,13068,13069,13070,13071,13072,13073,13074,13075,13076,13077,13078,13079,13080,13081, #13056 -13082,13083,13084,13085,13086,13087,13088,13089,13090,13091,13092,13093,13094,13095,13096,13097, #13072 -13098,13099,13100,13101,13102,13103,13104,13105,13106,13107,13108,13109,13110,13111,13112,13113, #13088 -13114,13115,13116,13117,13118,3968,13119,4995,13120,13121,13122,13123,13124,13125,13126,13127, #13104 -4505,13128,13129,13130,13131,13132,13133,13134,4996,4506,13135,13136,13137,13138,13139,4997, #13120 -13140,13141,13142,13143,13144,13145,13146,13147,13148,13149,13150,13151,13152,13153,13154,13155, #13136 
-13156,13157,13158,13159,4998,13160,13161,13162,13163,13164,13165,13166,13167,13168,13169,13170, #13152 -13171,13172,13173,13174,13175,13176,4999,13177,13178,13179,13180,13181,13182,13183,13184,13185, #13168 -13186,13187,13188,13189,13190,13191,13192,13193,13194,13195,13196,13197,13198,13199,13200,13201, #13184 -13202,13203,13204,13205,13206,5000,13207,13208,13209,13210,13211,13212,13213,13214,13215,13216, #13200 -13217,13218,13219,13220,13221,13222,13223,13224,13225,13226,13227,4200,5001,13228,13229,13230, #13216 -13231,13232,13233,13234,13235,13236,13237,13238,13239,13240,3969,13241,13242,13243,13244,3970, #13232 -13245,13246,13247,13248,13249,13250,13251,13252,13253,13254,13255,13256,13257,13258,13259,13260, #13248 -13261,13262,13263,13264,13265,13266,13267,13268,3450,13269,13270,13271,13272,13273,13274,13275, #13264 -13276,5002,13277,13278,13279,13280,13281,13282,13283,13284,13285,13286,13287,13288,13289,13290, #13280 -13291,13292,13293,13294,13295,13296,13297,13298,13299,13300,13301,13302,3813,13303,13304,13305, #13296 -13306,13307,13308,13309,13310,13311,13312,13313,13314,13315,13316,13317,13318,13319,13320,13321, #13312 -13322,13323,13324,13325,13326,13327,13328,4507,13329,13330,13331,13332,13333,13334,13335,13336, #13328 -13337,13338,13339,13340,13341,5003,13342,13343,13344,13345,13346,13347,13348,13349,13350,13351, #13344 -13352,13353,13354,13355,13356,13357,13358,13359,13360,13361,13362,13363,13364,13365,13366,13367, #13360 -5004,13368,13369,13370,13371,13372,13373,13374,13375,13376,13377,13378,13379,13380,13381,13382, #13376 -13383,13384,13385,13386,13387,13388,13389,13390,13391,13392,13393,13394,13395,13396,13397,13398, #13392 -13399,13400,13401,13402,13403,13404,13405,13406,13407,13408,13409,13410,13411,13412,13413,13414, #13408 -13415,13416,13417,13418,13419,13420,13421,13422,13423,13424,13425,13426,13427,13428,13429,13430, #13424 -13431,13432,4508,13433,13434,13435,4201,13436,13437,13438,13439,13440,13441,13442,13443,13444, #13440 -13445,13446,13447,13448,13449,13450,13451,13452,13453,13454,13455,13456,13457,5005,13458,13459, #13456 -13460,13461,13462,13463,13464,13465,13466,13467,13468,13469,13470,4509,13471,13472,13473,13474, #13472 -13475,13476,13477,13478,13479,13480,13481,13482,13483,13484,13485,13486,13487,13488,13489,13490, #13488 -13491,13492,13493,13494,13495,13496,13497,13498,13499,13500,13501,13502,13503,13504,13505,13506, #13504 -13507,13508,13509,13510,13511,13512,13513,13514,13515,13516,13517,13518,13519,13520,13521,13522, #13520 -13523,13524,13525,13526,13527,13528,13529,13530,13531,13532,13533,13534,13535,13536,13537,13538, #13536 -13539,13540,13541,13542,13543,13544,13545,13546,13547,13548,13549,13550,13551,13552,13553,13554, #13552 -13555,13556,13557,13558,13559,13560,13561,13562,13563,13564,13565,13566,13567,13568,13569,13570, #13568 -13571,13572,13573,13574,13575,13576,13577,13578,13579,13580,13581,13582,13583,13584,13585,13586, #13584 -13587,13588,13589,13590,13591,13592,13593,13594,13595,13596,13597,13598,13599,13600,13601,13602, #13600 -13603,13604,13605,13606,13607,13608,13609,13610,13611,13612,13613,13614,13615,13616,13617,13618, #13616 -13619,13620,13621,13622,13623,13624,13625,13626,13627,13628,13629,13630,13631,13632,13633,13634, #13632 -13635,13636,13637,13638,13639,13640,13641,13642,5006,13643,13644,13645,13646,13647,13648,13649, #13648 -13650,13651,5007,13652,13653,13654,13655,13656,13657,13658,13659,13660,13661,13662,13663,13664, #13664 -13665,13666,13667,13668,13669,13670,13671,13672,13673,13674,13675,13676,13677,13678,13679,13680, #13680 
-13681,13682,13683,13684,13685,13686,13687,13688,13689,13690,13691,13692,13693,13694,13695,13696, #13696 -13697,13698,13699,13700,13701,13702,13703,13704,13705,13706,13707,13708,13709,13710,13711,13712, #13712 -13713,13714,13715,13716,13717,13718,13719,13720,13721,13722,13723,13724,13725,13726,13727,13728, #13728 -13729,13730,13731,13732,13733,13734,13735,13736,13737,13738,13739,13740,13741,13742,13743,13744, #13744 -13745,13746,13747,13748,13749,13750,13751,13752,13753,13754,13755,13756,13757,13758,13759,13760, #13760 -13761,13762,13763,13764,13765,13766,13767,13768,13769,13770,13771,13772,13773,13774,3273,13775, #13776 -13776,13777,13778,13779,13780,13781,13782,13783,13784,13785,13786,13787,13788,13789,13790,13791, #13792 -13792,13793,13794,13795,13796,13797,13798,13799,13800,13801,13802,13803,13804,13805,13806,13807, #13808 -13808,13809,13810,13811,13812,13813,13814,13815,13816,13817,13818,13819,13820,13821,13822,13823, #13824 -13824,13825,13826,13827,13828,13829,13830,13831,13832,13833,13834,13835,13836,13837,13838,13839, #13840 -13840,13841,13842,13843,13844,13845,13846,13847,13848,13849,13850,13851,13852,13853,13854,13855, #13856 -13856,13857,13858,13859,13860,13861,13862,13863,13864,13865,13866,13867,13868,13869,13870,13871, #13872 -13872,13873,13874,13875,13876,13877,13878,13879,13880,13881,13882,13883,13884,13885,13886,13887, #13888 -13888,13889,13890,13891,13892,13893,13894,13895,13896,13897,13898,13899,13900,13901,13902,13903, #13904 -13904,13905,13906,13907,13908,13909,13910,13911,13912,13913,13914,13915,13916,13917,13918,13919, #13920 -13920,13921,13922,13923,13924,13925,13926,13927,13928,13929,13930,13931,13932,13933,13934,13935, #13936 -13936,13937,13938,13939,13940,13941,13942,13943,13944,13945,13946,13947,13948,13949,13950,13951, #13952 -13952,13953,13954,13955,13956,13957,13958,13959,13960,13961,13962,13963,13964,13965,13966,13967, #13968 -13968,13969,13970,13971,13972) #13973 - -# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/big5prober.py b/awx/lib/site-packages/requests/packages/charade/big5prober.py deleted file mode 100644 index 7382f7c5d4..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/big5prober.py +++ /dev/null @@ -1,42 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import Big5DistributionAnalysis -from .mbcssm import Big5SMModel - - -class Big5Prober(MultiByteCharSetProber): - def __init__(self): - MultiByteCharSetProber.__init__(self) - self._mCodingSM = CodingStateMachine(Big5SMModel) - self._mDistributionAnalyzer = Big5DistributionAnalysis() - self.reset() - - def get_charset_name(self): - return "Big5" diff --git a/awx/lib/site-packages/requests/packages/charade/chardistribution.py b/awx/lib/site-packages/requests/packages/charade/chardistribution.py deleted file mode 100644 index dfd3355e91..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/chardistribution.py +++ /dev/null @@ -1,231 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .euctwfreq import (EUCTWCharToFreqOrder, EUCTW_TABLE_SIZE, - EUCTW_TYPICAL_DISTRIBUTION_RATIO) -from .euckrfreq import (EUCKRCharToFreqOrder, EUCKR_TABLE_SIZE, - EUCKR_TYPICAL_DISTRIBUTION_RATIO) -from .gb2312freq import (GB2312CharToFreqOrder, GB2312_TABLE_SIZE, - GB2312_TYPICAL_DISTRIBUTION_RATIO) -from .big5freq import (Big5CharToFreqOrder, BIG5_TABLE_SIZE, - BIG5_TYPICAL_DISTRIBUTION_RATIO) -from .jisfreq import (JISCharToFreqOrder, JIS_TABLE_SIZE, - JIS_TYPICAL_DISTRIBUTION_RATIO) -from .compat import wrap_ord - -ENOUGH_DATA_THRESHOLD = 1024 -SURE_YES = 0.99 -SURE_NO = 0.01 -MINIMUM_DATA_THRESHOLD = 3 - - -class CharDistributionAnalysis: - def __init__(self): - # Mapping table to get frequency order from char order (get from - # GetOrder()) - self._mCharToFreqOrder = None - self._mTableSize = None # Size of above table - # This is a constant value which varies from language to language, - # used in calculating confidence. See - # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html - # for further detail. 
- self._mTypicalDistributionRatio = None - self.reset() - - def reset(self): - """reset analyser, clear any state""" - # If this flag is set to True, detection is done and conclusion has - # been made - self._mDone = False - self._mTotalChars = 0 # Total characters encountered - # The number of characters whose frequency order is less than 512 - self._mFreqChars = 0 - - def feed(self, aBuf, aCharLen): - """feed a character with known length""" - if aCharLen == 2: - # we only care about 2-bytes character in our distribution analysis - order = self.get_order(aBuf) - else: - order = -1 - if order >= 0: - self._mTotalChars += 1 - # order is valid - if order < self._mTableSize: - if 512 > self._mCharToFreqOrder[order]: - self._mFreqChars += 1 - - def get_confidence(self): - """return confidence based on existing data""" - # if we didn't receive any character in our consideration range, - # return negative answer - if self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD: - return SURE_NO - - if self._mTotalChars != self._mFreqChars: - r = (self._mFreqChars / ((self._mTotalChars - self._mFreqChars) - * self._mTypicalDistributionRatio)) - if r < SURE_YES: - return r - - # normalize confidence (we don't want to be 100% sure) - return SURE_YES - - def got_enough_data(self): - # It is not necessary to receive all data to draw conclusion. - # For charset detection, certain amount of data is enough - return self._mTotalChars > ENOUGH_DATA_THRESHOLD - - def get_order(self, aBuf): - # We do not handle characters based on the original encoding string, - # but convert this encoding string to a number, here called order. - # This allows multiple encodings of a language to share one frequency - # table. - return -1 - - -class EUCTWDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - CharDistributionAnalysis.__init__(self) - self._mCharToFreqOrder = EUCTWCharToFreqOrder - self._mTableSize = EUCTW_TABLE_SIZE - self._mTypicalDistributionRatio = EUCTW_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, aBuf): - # for euc-TW encoding, we are interested - # first byte range: 0xc4 -- 0xfe - # second byte range: 0xa1 -- 0xfe - # no validation needed here. State machine has done that - first_char = wrap_ord(aBuf[0]) - if first_char >= 0xC4: - return 94 * (first_char - 0xC4) + wrap_ord(aBuf[1]) - 0xA1 - else: - return -1 - - -class EUCKRDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - CharDistributionAnalysis.__init__(self) - self._mCharToFreqOrder = EUCKRCharToFreqOrder - self._mTableSize = EUCKR_TABLE_SIZE - self._mTypicalDistributionRatio = EUCKR_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, aBuf): - # for euc-KR encoding, we are interested - # first byte range: 0xb0 -- 0xfe - # second byte range: 0xa1 -- 0xfe - # no validation needed here. State machine has done that - first_char = wrap_ord(aBuf[0]) - if first_char >= 0xB0: - return 94 * (first_char - 0xB0) + wrap_ord(aBuf[1]) - 0xA1 - else: - return -1 - - -class GB2312DistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - CharDistributionAnalysis.__init__(self) - self._mCharToFreqOrder = GB2312CharToFreqOrder - self._mTableSize = GB2312_TABLE_SIZE - self._mTypicalDistributionRatio = GB2312_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, aBuf): - # for GB2312 encoding, we are interested - # first byte range: 0xb0 -- 0xfe - # second byte range: 0xa1 -- 0xfe - # no validation needed here. 
State machine has done that - first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1]) - if (first_char >= 0xB0) and (second_char >= 0xA1): - return 94 * (first_char - 0xB0) + second_char - 0xA1 - else: - return -1 - - -class Big5DistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - CharDistributionAnalysis.__init__(self) - self._mCharToFreqOrder = Big5CharToFreqOrder - self._mTableSize = BIG5_TABLE_SIZE - self._mTypicalDistributionRatio = BIG5_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, aBuf): - # for big5 encoding, we are interested - # first byte range: 0xa4 -- 0xfe - # second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe - # no validation needed here. State machine has done that - first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1]) - if first_char >= 0xA4: - if second_char >= 0xA1: - return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63 - else: - return 157 * (first_char - 0xA4) + second_char - 0x40 - else: - return -1 - - -class SJISDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - CharDistributionAnalysis.__init__(self) - self._mCharToFreqOrder = JISCharToFreqOrder - self._mTableSize = JIS_TABLE_SIZE - self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, aBuf): - # for sjis encoding, we are interested - # first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe - # second byte range: 0x40 -- 0x7e, 0x81 -- oxfe - # no validation needed here. State machine has done that - first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1]) - if (first_char >= 0x81) and (first_char <= 0x9F): - order = 188 * (first_char - 0x81) - elif (first_char >= 0xE0) and (first_char <= 0xEF): - order = 188 * (first_char - 0xE0 + 31) - else: - return -1 - order = order + second_char - 0x40 - if second_char > 0x7F: - order = -1 - return order - - -class EUCJPDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - CharDistributionAnalysis.__init__(self) - self._mCharToFreqOrder = JISCharToFreqOrder - self._mTableSize = JIS_TABLE_SIZE - self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, aBuf): - # for euc-JP encoding, we are interested - # first byte range: 0xa0 -- 0xfe - # second byte range: 0xa1 -- 0xfe - # no validation needed here. State machine has done that - char = wrap_ord(aBuf[0]) - if char >= 0xA0: - return 94 * (char - 0xA1) + wrap_ord(aBuf[1]) - 0xa1 - else: - return -1 diff --git a/awx/lib/site-packages/requests/packages/charade/charsetgroupprober.py b/awx/lib/site-packages/requests/packages/charade/charsetgroupprober.py deleted file mode 100644 index 2959654748..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/charsetgroupprober.py +++ /dev/null @@ -1,106 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
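
# A minimal sketch, not the vendored code itself, of the byte-pair
# arithmetic in Big5DistributionAnalysis.get_order above: each lead byte
# from 0xa4 on owns 157 slots, split across the 0x40-0x7e and 0xa1-0xfe
# trail ranges. get_confidence then scores a buffer roughly as
#   r = freq_chars / ((total_chars - freq_chars) * typical_ratio)
# so e.g. 800 frequent characters out of 1000, with a typical ratio of
# 0.75, gives r > 1, which is clamped down to SURE_YES (0.99).

def big5_order(first, second):
    if first < 0xA4:
        return -1
    if second >= 0xA1:
        return 157 * (first - 0xA4) + second - 0xA1 + 63
    return 157 * (first - 0xA4) + second - 0x40

print(big5_order(0xA4, 0x40), big5_order(0xA5, 0xA1))  # 0 220
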
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from . import constants -import sys -from .charsetprober import CharSetProber - - -class CharSetGroupProber(CharSetProber): - def __init__(self): - CharSetProber.__init__(self) - self._mActiveNum = 0 - self._mProbers = [] - self._mBestGuessProber = None - - def reset(self): - CharSetProber.reset(self) - self._mActiveNum = 0 - for prober in self._mProbers: - if prober: - prober.reset() - prober.active = True - self._mActiveNum += 1 - self._mBestGuessProber = None - - def get_charset_name(self): - if not self._mBestGuessProber: - self.get_confidence() - if not self._mBestGuessProber: - return None -# self._mBestGuessProber = self._mProbers[0] - return self._mBestGuessProber.get_charset_name() - - def feed(self, aBuf): - for prober in self._mProbers: - if not prober: - continue - if not prober.active: - continue - st = prober.feed(aBuf) - if not st: - continue - if st == constants.eFoundIt: - self._mBestGuessProber = prober - return self.get_state() - elif st == constants.eNotMe: - prober.active = False - self._mActiveNum -= 1 - if self._mActiveNum <= 0: - self._mState = constants.eNotMe - return self.get_state() - return self.get_state() - - def get_confidence(self): - st = self.get_state() - if st == constants.eFoundIt: - return 0.99 - elif st == constants.eNotMe: - return 0.01 - bestConf = 0.0 - self._mBestGuessProber = None - for prober in self._mProbers: - if not prober: - continue - if not prober.active: - if constants._debug: - sys.stderr.write(prober.get_charset_name() - + ' not active\n') - continue - cf = prober.get_confidence() - if constants._debug: - sys.stderr.write('%s confidence = %s\n' % - (prober.get_charset_name(), cf)) - if bestConf < cf: - bestConf = cf - self._mBestGuessProber = prober - if not self._mBestGuessProber: - return 0.0 - return bestConf -# else: -# self._mBestGuessProber = self._mProbers[0] -# return self._mBestGuessProber.get_confidence() diff --git a/awx/lib/site-packages/requests/packages/charade/charsetprober.py b/awx/lib/site-packages/requests/packages/charade/charsetprober.py deleted file mode 100644 index 97581712c1..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/charsetprober.py +++ /dev/null @@ -1,62 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
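
# A self-contained sketch of the selection logic in
# CharSetGroupProber.get_confidence above: poll every active child prober
# and keep the one reporting the highest confidence. FakeProber is a
# stand-in for the real prober classes, not part of charade:

class FakeProber:
    def __init__(self, name, confidence, active=True):
        self.name, self.confidence, self.active = name, confidence, active

def best_guess(probers):
    best, best_conf = None, 0.0
    for p in probers:
        if p.active and p.confidence > best_conf:
            best, best_conf = p, p.confidence
    return best

candidates = [FakeProber('Big5', 0.4), FakeProber('EUC-KR', 0.7),
              FakeProber('SHIFT_JIS', 0.9, active=False)]
print(best_guess(candidates).name)  # EUC-KR: the inactive prober is skipped
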
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from . import constants -import re - - -class CharSetProber: - def __init__(self): - pass - - def reset(self): - self._mState = constants.eDetecting - - def get_charset_name(self): - return None - - def feed(self, aBuf): - pass - - def get_state(self): - return self._mState - - def get_confidence(self): - return 0.0 - - def filter_high_bit_only(self, aBuf): - aBuf = re.sub(b'([\x00-\x7F])+', b' ', aBuf) - return aBuf - - def filter_without_english_letters(self, aBuf): - aBuf = re.sub(b'([A-Za-z])+', b' ', aBuf) - return aBuf - - def filter_with_english_letters(self, aBuf): - # TODO - return aBuf diff --git a/awx/lib/site-packages/requests/packages/charade/codingstatemachine.py b/awx/lib/site-packages/requests/packages/charade/codingstatemachine.py deleted file mode 100644 index 1bda9ff162..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/codingstatemachine.py +++ /dev/null @@ -1,61 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
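
# The filter helpers on CharSetProber above are plain re.sub calls over
# byte strings. A short demonstration of filter_high_bit_only, which
# collapses every run of ASCII bytes to a single space and keeps the
# high-bit bytes the multi-byte probers care about:

import re

def filter_high_bit_only(buf):
    return re.sub(b'([\x00-\x7F])+', b' ', buf)

print(filter_high_bit_only(b'abc\xa4\x40def\xb0\xa1'))
# b' \xa4 \xb0\xa1' -- 0x40 ('@') is ASCII, so it folds into the space
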
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .constants import eStart -from .compat import wrap_ord - - -class CodingStateMachine: - def __init__(self, sm): - self._mModel = sm - self._mCurrentBytePos = 0 - self._mCurrentCharLen = 0 - self.reset() - - def reset(self): - self._mCurrentState = eStart - - def next_state(self, c): - # for each byte we get its class - # if it is first byte, we also get byte length - # PY3K: aBuf is a byte stream, so c is an int, not a byte - byteCls = self._mModel['classTable'][wrap_ord(c)] - if self._mCurrentState == eStart: - self._mCurrentBytePos = 0 - self._mCurrentCharLen = self._mModel['charLenTable'][byteCls] - # from byte's class and stateTable, we get its next state - curr_state = (self._mCurrentState * self._mModel['classFactor'] - + byteCls) - self._mCurrentState = self._mModel['stateTable'][curr_state] - self._mCurrentBytePos += 1 - return self._mCurrentState - - def get_current_charlen(self): - return self._mCurrentCharLen - - def get_coding_state_machine(self): - return self._mModel['name'] diff --git a/awx/lib/site-packages/requests/packages/charade/compat.py b/awx/lib/site-packages/requests/packages/charade/compat.py deleted file mode 100644 index d9e30addf9..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/compat.py +++ /dev/null @@ -1,34 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# Contributor(s): -# Ian Cordasco - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -import sys - - -if sys.version_info < (3, 0): - base_str = (str, unicode) -else: - base_str = (bytes, str) - - -def wrap_ord(a): - if sys.version_info < (3, 0) and isinstance(a, base_str): - return ord(a) - else: - return a diff --git a/awx/lib/site-packages/requests/packages/charade/constants.py b/awx/lib/site-packages/requests/packages/charade/constants.py deleted file mode 100644 index a3d27de250..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/constants.py +++ /dev/null @@ -1,39 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. 
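
# compat.wrap_ord above papers over the str/int split between Python 2 and
# Python 3 byte strings: indexing bytes yields a str on 2 but an int on 3.
# A minimal Python-3-only restatement, where the guard reduces to ord()
# for the occasional str argument and the identity for ints:

def wrap_ord(a):
    return ord(a) if isinstance(a, str) else a

print(wrap_ord('A'), wrap_ord(b'A'[0]))  # 65 65
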
-# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -_debug = 0 - -eDetecting = 0 -eFoundIt = 1 -eNotMe = 2 - -eStart = 0 -eError = 1 -eItsMe = 2 - -SHORTCUT_THRESHOLD = 0.95 diff --git a/awx/lib/site-packages/requests/packages/charade/cp949prober.py b/awx/lib/site-packages/requests/packages/charade/cp949prober.py deleted file mode 100644 index 543501fe09..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/cp949prober.py +++ /dev/null @@ -1,44 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import EUCKRDistributionAnalysis -from .mbcssm import CP949SMModel - - -class CP949Prober(MultiByteCharSetProber): - def __init__(self): - MultiByteCharSetProber.__init__(self) - self._mCodingSM = CodingStateMachine(CP949SMModel) - # NOTE: CP949 is a superset of EUC-KR, so the distribution should be - # not different. - self._mDistributionAnalyzer = EUCKRDistributionAnalysis() - self.reset() - - def get_charset_name(self): - return "CP949" diff --git a/awx/lib/site-packages/requests/packages/charade/escprober.py b/awx/lib/site-packages/requests/packages/charade/escprober.py deleted file mode 100644 index 0063935ce6..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/escprober.py +++ /dev/null @@ -1,86 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. 
-# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from . import constants -from .escsm import (HZSMModel, ISO2022CNSMModel, ISO2022JPSMModel, - ISO2022KRSMModel) -from .charsetprober import CharSetProber -from .codingstatemachine import CodingStateMachine -from .compat import wrap_ord - - -class EscCharSetProber(CharSetProber): - def __init__(self): - CharSetProber.__init__(self) - self._mCodingSM = [ - CodingStateMachine(HZSMModel), - CodingStateMachine(ISO2022CNSMModel), - CodingStateMachine(ISO2022JPSMModel), - CodingStateMachine(ISO2022KRSMModel) - ] - self.reset() - - def reset(self): - CharSetProber.reset(self) - for codingSM in self._mCodingSM: - if not codingSM: - continue - codingSM.active = True - codingSM.reset() - self._mActiveSM = len(self._mCodingSM) - self._mDetectedCharset = None - - def get_charset_name(self): - return self._mDetectedCharset - - def get_confidence(self): - if self._mDetectedCharset: - return 0.99 - else: - return 0.00 - - def feed(self, aBuf): - for c in aBuf: - # PY3K: aBuf is a byte array, so c is an int, not a byte - for codingSM in self._mCodingSM: - if not codingSM: - continue - if not codingSM.active: - continue - codingState = codingSM.next_state(wrap_ord(c)) - if codingState == constants.eError: - codingSM.active = False - self._mActiveSM -= 1 - if self._mActiveSM <= 0: - self._mState = constants.eNotMe - return self.get_state() - elif codingState == constants.eItsMe: - self._mState = constants.eFoundIt - self._mDetectedCharset = codingSM.get_coding_state_machine() # nopep8 - return self.get_state() - - return self.get_state() diff --git a/awx/lib/site-packages/requests/packages/charade/escsm.py b/awx/lib/site-packages/requests/packages/charade/escsm.py deleted file mode 100644 index 1cf3aa6db6..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/escsm.py +++ /dev/null @@ -1,242 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
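
# A minimal sketch of the race in EscCharSetProber.feed above: several
# coding state machines consume the same bytes, and the first to reach
# eItsMe decides the charset (in the real code, machines that return
# eError are also deactivated). StubSM is a stand-in for
# CodingStateMachine and only models the winning path:

eDetecting, eItsMe = 0, 2

class StubSM:
    def __init__(self, name, trigger):
        self.name, self.trigger, self.active = name, trigger, True
        self.seen = b''
    def next_state(self, byte):
        self.seen += bytes([byte])
        return eItsMe if self.trigger in self.seen else eDetecting

def race(machines, buf):
    for c in buf:
        for sm in machines:
            if sm.active and sm.next_state(c) == eItsMe:
                return sm.name
    return None

machines = [StubSM('ISO-2022-JP', b'\x1b$B'), StubSM('ISO-2022-KR', b'\x1b$)C')]
print(race(machines, b'plain text \x1b$B...'))  # ISO-2022-JP
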
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .constants import eStart, eError, eItsMe - -HZ_cls = ( -1,0,0,0,0,0,0,0, # 00 - 07 -0,0,0,0,0,0,0,0, # 08 - 0f -0,0,0,0,0,0,0,0, # 10 - 17 -0,0,0,1,0,0,0,0, # 18 - 1f -0,0,0,0,0,0,0,0, # 20 - 27 -0,0,0,0,0,0,0,0, # 28 - 2f -0,0,0,0,0,0,0,0, # 30 - 37 -0,0,0,0,0,0,0,0, # 38 - 3f -0,0,0,0,0,0,0,0, # 40 - 47 -0,0,0,0,0,0,0,0, # 48 - 4f -0,0,0,0,0,0,0,0, # 50 - 57 -0,0,0,0,0,0,0,0, # 58 - 5f -0,0,0,0,0,0,0,0, # 60 - 67 -0,0,0,0,0,0,0,0, # 68 - 6f -0,0,0,0,0,0,0,0, # 70 - 77 -0,0,0,4,0,5,2,0, # 78 - 7f -1,1,1,1,1,1,1,1, # 80 - 87 -1,1,1,1,1,1,1,1, # 88 - 8f -1,1,1,1,1,1,1,1, # 90 - 97 -1,1,1,1,1,1,1,1, # 98 - 9f -1,1,1,1,1,1,1,1, # a0 - a7 -1,1,1,1,1,1,1,1, # a8 - af -1,1,1,1,1,1,1,1, # b0 - b7 -1,1,1,1,1,1,1,1, # b8 - bf -1,1,1,1,1,1,1,1, # c0 - c7 -1,1,1,1,1,1,1,1, # c8 - cf -1,1,1,1,1,1,1,1, # d0 - d7 -1,1,1,1,1,1,1,1, # d8 - df -1,1,1,1,1,1,1,1, # e0 - e7 -1,1,1,1,1,1,1,1, # e8 - ef -1,1,1,1,1,1,1,1, # f0 - f7 -1,1,1,1,1,1,1,1, # f8 - ff -) - -HZ_st = ( -eStart,eError, 3,eStart,eStart,eStart,eError,eError,# 00-07 -eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 08-0f -eItsMe,eItsMe,eError,eError,eStart,eStart, 4,eError,# 10-17 - 5,eError, 6,eError, 5, 5, 4,eError,# 18-1f - 4,eError, 4, 4, 4,eError, 4,eError,# 20-27 - 4,eItsMe,eStart,eStart,eStart,eStart,eStart,eStart,# 28-2f -) - -HZCharLenTable = (0, 0, 0, 0, 0, 0) - -HZSMModel = {'classTable': HZ_cls, - 'classFactor': 6, - 'stateTable': HZ_st, - 'charLenTable': HZCharLenTable, - 'name': "HZ-GB-2312"} - -ISO2022CN_cls = ( -2,0,0,0,0,0,0,0, # 00 - 07 -0,0,0,0,0,0,0,0, # 08 - 0f -0,0,0,0,0,0,0,0, # 10 - 17 -0,0,0,1,0,0,0,0, # 18 - 1f -0,0,0,0,0,0,0,0, # 20 - 27 -0,3,0,0,0,0,0,0, # 28 - 2f -0,0,0,0,0,0,0,0, # 30 - 37 -0,0,0,0,0,0,0,0, # 38 - 3f -0,0,0,4,0,0,0,0, # 40 - 47 -0,0,0,0,0,0,0,0, # 48 - 4f -0,0,0,0,0,0,0,0, # 50 - 57 -0,0,0,0,0,0,0,0, # 58 - 5f -0,0,0,0,0,0,0,0, # 60 - 67 -0,0,0,0,0,0,0,0, # 68 - 6f -0,0,0,0,0,0,0,0, # 70 - 77 -0,0,0,0,0,0,0,0, # 78 - 7f -2,2,2,2,2,2,2,2, # 80 - 87 -2,2,2,2,2,2,2,2, # 88 - 8f -2,2,2,2,2,2,2,2, # 90 - 97 -2,2,2,2,2,2,2,2, # 98 - 9f -2,2,2,2,2,2,2,2, # a0 - a7 -2,2,2,2,2,2,2,2, # a8 - af -2,2,2,2,2,2,2,2, # b0 - b7 -2,2,2,2,2,2,2,2, # b8 - bf -2,2,2,2,2,2,2,2, # c0 - c7 -2,2,2,2,2,2,2,2, # c8 - cf -2,2,2,2,2,2,2,2, # d0 - d7 -2,2,2,2,2,2,2,2, # d8 - df -2,2,2,2,2,2,2,2, # e0 - e7 -2,2,2,2,2,2,2,2, # e8 - ef -2,2,2,2,2,2,2,2, # f0 - f7 -2,2,2,2,2,2,2,2, # f8 - ff -) - -ISO2022CN_st = ( -eStart, 3,eError,eStart,eStart,eStart,eStart,eStart,# 00-07 -eStart,eError,eError,eError,eError,eError,eError,eError,# 08-0f -eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,# 10-17 -eItsMe,eItsMe,eItsMe,eError,eError,eError, 4,eError,# 18-1f -eError,eError,eError,eItsMe,eError,eError,eError,eError,# 20-27 - 5, 6,eError,eError,eError,eError,eError,eError,# 28-2f -eError,eError,eError,eItsMe,eError,eError,eError,eError,# 30-37 -eError,eError,eError,eError,eError,eItsMe,eError,eStart,# 38-3f -) - -ISO2022CNCharLenTable = (0, 0, 0, 0, 0, 0, 0, 0, 0) - 
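
# A minimal sketch of how the CodingStateMachine deleted earlier in this
# diff interprets model dicts like HZSMModel above: classTable maps each
# byte to a class, and the flat stateTable is a (states x classFactor)
# matrix indexed as stateTable[state * classFactor + byte_class]. The toy
# model below is hypothetical, not one of the real tables:

eStart, eError, eItsMe = 0, 1, 2

toy_classTable = [0] * 256
toy_classTable[0x1B] = 1           # make ESC the only class-1 byte
toy_model = {
    'classTable': toy_classTable,
    'classFactor': 2,
    'stateTable': (eStart, 3,       # state 0: stay, or go to state 3 on ESC
                   eError, eError,  # state 1: the eError sink row
                   eItsMe, eItsMe,  # state 2: the eItsMe sink row
                   eStart, eItsMe), # state 3: a second ESC in a row wins
}

def next_state(model, state, byte):
    cls = model['classTable'][byte]
    return model['stateTable'][state * model['classFactor'] + cls]

state = eStart
for b in b'a\x1b\x1b':
    state = next_state(toy_model, state, b)
print(state == eItsMe)  # True
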
-ISO2022CNSMModel = {'classTable': ISO2022CN_cls, - 'classFactor': 9, - 'stateTable': ISO2022CN_st, - 'charLenTable': ISO2022CNCharLenTable, - 'name': "ISO-2022-CN"} - -ISO2022JP_cls = ( -2,0,0,0,0,0,0,0, # 00 - 07 -0,0,0,0,0,0,2,2, # 08 - 0f -0,0,0,0,0,0,0,0, # 10 - 17 -0,0,0,1,0,0,0,0, # 18 - 1f -0,0,0,0,7,0,0,0, # 20 - 27 -3,0,0,0,0,0,0,0, # 28 - 2f -0,0,0,0,0,0,0,0, # 30 - 37 -0,0,0,0,0,0,0,0, # 38 - 3f -6,0,4,0,8,0,0,0, # 40 - 47 -0,9,5,0,0,0,0,0, # 48 - 4f -0,0,0,0,0,0,0,0, # 50 - 57 -0,0,0,0,0,0,0,0, # 58 - 5f -0,0,0,0,0,0,0,0, # 60 - 67 -0,0,0,0,0,0,0,0, # 68 - 6f -0,0,0,0,0,0,0,0, # 70 - 77 -0,0,0,0,0,0,0,0, # 78 - 7f -2,2,2,2,2,2,2,2, # 80 - 87 -2,2,2,2,2,2,2,2, # 88 - 8f -2,2,2,2,2,2,2,2, # 90 - 97 -2,2,2,2,2,2,2,2, # 98 - 9f -2,2,2,2,2,2,2,2, # a0 - a7 -2,2,2,2,2,2,2,2, # a8 - af -2,2,2,2,2,2,2,2, # b0 - b7 -2,2,2,2,2,2,2,2, # b8 - bf -2,2,2,2,2,2,2,2, # c0 - c7 -2,2,2,2,2,2,2,2, # c8 - cf -2,2,2,2,2,2,2,2, # d0 - d7 -2,2,2,2,2,2,2,2, # d8 - df -2,2,2,2,2,2,2,2, # e0 - e7 -2,2,2,2,2,2,2,2, # e8 - ef -2,2,2,2,2,2,2,2, # f0 - f7 -2,2,2,2,2,2,2,2, # f8 - ff -) - -ISO2022JP_st = ( -eStart, 3,eError,eStart,eStart,eStart,eStart,eStart,# 00-07 -eStart,eStart,eError,eError,eError,eError,eError,eError,# 08-0f -eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 10-17 -eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,# 18-1f -eError, 5,eError,eError,eError, 4,eError,eError,# 20-27 -eError,eError,eError, 6,eItsMe,eError,eItsMe,eError,# 28-2f -eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,# 30-37 -eError,eError,eError,eItsMe,eError,eError,eError,eError,# 38-3f -eError,eError,eError,eError,eItsMe,eError,eStart,eStart,# 40-47 -) - -ISO2022JPCharLenTable = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0) - -ISO2022JPSMModel = {'classTable': ISO2022JP_cls, - 'classFactor': 10, - 'stateTable': ISO2022JP_st, - 'charLenTable': ISO2022JPCharLenTable, - 'name': "ISO-2022-JP"} - -ISO2022KR_cls = ( -2,0,0,0,0,0,0,0, # 00 - 07 -0,0,0,0,0,0,0,0, # 08 - 0f -0,0,0,0,0,0,0,0, # 10 - 17 -0,0,0,1,0,0,0,0, # 18 - 1f -0,0,0,0,3,0,0,0, # 20 - 27 -0,4,0,0,0,0,0,0, # 28 - 2f -0,0,0,0,0,0,0,0, # 30 - 37 -0,0,0,0,0,0,0,0, # 38 - 3f -0,0,0,5,0,0,0,0, # 40 - 47 -0,0,0,0,0,0,0,0, # 48 - 4f -0,0,0,0,0,0,0,0, # 50 - 57 -0,0,0,0,0,0,0,0, # 58 - 5f -0,0,0,0,0,0,0,0, # 60 - 67 -0,0,0,0,0,0,0,0, # 68 - 6f -0,0,0,0,0,0,0,0, # 70 - 77 -0,0,0,0,0,0,0,0, # 78 - 7f -2,2,2,2,2,2,2,2, # 80 - 87 -2,2,2,2,2,2,2,2, # 88 - 8f -2,2,2,2,2,2,2,2, # 90 - 97 -2,2,2,2,2,2,2,2, # 98 - 9f -2,2,2,2,2,2,2,2, # a0 - a7 -2,2,2,2,2,2,2,2, # a8 - af -2,2,2,2,2,2,2,2, # b0 - b7 -2,2,2,2,2,2,2,2, # b8 - bf -2,2,2,2,2,2,2,2, # c0 - c7 -2,2,2,2,2,2,2,2, # c8 - cf -2,2,2,2,2,2,2,2, # d0 - d7 -2,2,2,2,2,2,2,2, # d8 - df -2,2,2,2,2,2,2,2, # e0 - e7 -2,2,2,2,2,2,2,2, # e8 - ef -2,2,2,2,2,2,2,2, # f0 - f7 -2,2,2,2,2,2,2,2, # f8 - ff -) - -ISO2022KR_st = ( -eStart, 3,eError,eStart,eStart,eStart,eError,eError,# 00-07 -eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 08-0f -eItsMe,eItsMe,eError,eError,eError, 4,eError,eError,# 10-17 -eError,eError,eError,eError, 5,eError,eError,eError,# 18-1f -eError,eError,eError,eItsMe,eStart,eStart,eStart,eStart,# 20-27 -) - -ISO2022KRCharLenTable = (0, 0, 0, 0, 0, 0) - -ISO2022KRSMModel = {'classTable': ISO2022KR_cls, - 'classFactor': 6, - 'stateTable': ISO2022KR_st, - 'charLenTable': ISO2022KRCharLenTable, - 'name': "ISO-2022-KR"} - -# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/eucjpprober.py b/awx/lib/site-packages/requests/packages/charade/eucjpprober.py deleted file mode 
100644 index d70cfbbb01..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/eucjpprober.py +++ /dev/null @@ -1,90 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -import sys -from . import constants -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import EUCJPDistributionAnalysis -from .jpcntx import EUCJPContextAnalysis -from .mbcssm import EUCJPSMModel - - -class EUCJPProber(MultiByteCharSetProber): - def __init__(self): - MultiByteCharSetProber.__init__(self) - self._mCodingSM = CodingStateMachine(EUCJPSMModel) - self._mDistributionAnalyzer = EUCJPDistributionAnalysis() - self._mContextAnalyzer = EUCJPContextAnalysis() - self.reset() - - def reset(self): - MultiByteCharSetProber.reset(self) - self._mContextAnalyzer.reset() - - def get_charset_name(self): - return "EUC-JP" - - def feed(self, aBuf): - aLen = len(aBuf) - for i in range(0, aLen): - # PY3K: aBuf is a byte array, so aBuf[i] is an int, not a byte - codingState = self._mCodingSM.next_state(aBuf[i]) - if codingState == constants.eError: - if constants._debug: - sys.stderr.write(self.get_charset_name() - + ' prober hit error at byte ' + str(i) - + '\n') - self._mState = constants.eNotMe - break - elif codingState == constants.eItsMe: - self._mState = constants.eFoundIt - break - elif codingState == constants.eStart: - charLen = self._mCodingSM.get_current_charlen() - if i == 0: - self._mLastChar[1] = aBuf[0] - self._mContextAnalyzer.feed(self._mLastChar, charLen) - self._mDistributionAnalyzer.feed(self._mLastChar, charLen) - else: - self._mContextAnalyzer.feed(aBuf[i - 1:i + 1], charLen) - self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1], - charLen) - - self._mLastChar[0] = aBuf[aLen - 1] - - if self.get_state() == constants.eDetecting: - if (self._mContextAnalyzer.got_enough_data() and - (self.get_confidence() > constants.SHORTCUT_THRESHOLD)): - self._mState = constants.eFoundIt - - return self.get_state() - - def get_confidence(self): - contxtCf = self._mContextAnalyzer.get_confidence() - distribCf = self._mDistributionAnalyzer.get_confidence() - return max(contxtCf, distribCf) diff --git a/awx/lib/site-packages/requests/packages/charade/euckrfreq.py b/awx/lib/site-packages/requests/packages/charade/euckrfreq.py deleted file mode 100644 index a179e4c21c..0000000000 --- 
a/awx/lib/site-packages/requests/packages/charade/euckrfreq.py +++ /dev/null @@ -1,596 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# Sampling from about 20M text materials include literature and computer technology - -# 128 --> 0.79 -# 256 --> 0.92 -# 512 --> 0.986 -# 1024 --> 0.99944 -# 2048 --> 0.99999 -# -# Idea Distribution Ratio = 0.98653 / (1-0.98653) = 73.24 -# Random Distribution Ration = 512 / (2350-512) = 0.279. -# -# Typical Distribution Ratio - -EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0 - -EUCKR_TABLE_SIZE = 2352 - -# Char to FreqOrder table , -EUCKRCharToFreqOrder = ( \ - 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87, -1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398, -1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734, - 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739, - 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622, - 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750, -1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856, - 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205, - 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779, -1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19, -1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567, -1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797, -1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802, -1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899, - 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818, -1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409, -1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697, -1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770, -1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723, - 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416, -1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300, - 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083, - 
893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857, -1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871, - 282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420, -1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885, - 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889, - 0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893, -1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317, -1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841, -1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910, -1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610, - 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375, -1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939, - 887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870, - 217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934, -1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888, -1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950, -1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065, -1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002, -1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965, -1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467, - 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285, - 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7, - 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979, -1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985, - 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994, -1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250, - 423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824, - 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003, -2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745, - 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61, - 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023, -2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032, -2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912, -2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224, - 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012, - 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050, -2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681, - 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414, -1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068, -2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075, -1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850, -2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606, -2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449, -1450,2100, 
596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452, - 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112, -2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121, -2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130, - 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274, - 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139, -2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721, -1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298, -2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463, -2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747, -2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285, -2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187, -2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10, -2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350, -1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201, -2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972, -2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219, -2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233, -2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242, -2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247, -1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178, -1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255, -2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259, -1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262, -2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702, -1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273, - 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541, -2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117, - 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187, -2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800, - 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312, -2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229, -2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315, - 501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484, -2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170, -1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335, - 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601, -1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395, -2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354, -1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476, -2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035, - 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498, -2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310, -1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389, 
-2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504, -1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505, -2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145, -1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624, - 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700, -2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221, -2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377, - 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448, - 915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485, -1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705, -1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465, - 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471, -2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997, -2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486, - 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494, - 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771, - 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323, -2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491, - 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510, - 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519, -2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532, -2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199, - 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544, -2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247, -1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441, - 249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562, -2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362, -2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583, -2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465, - 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431, - 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151, - 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596, -2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406, -2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611, -2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619, -1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628, -2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042, - 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256 -#Everything below is of no interest for detection purpose -2643,2644,2645,2646,2647,2648,2649,2650,2651,2652,2653,2654,2655,2656,2657,2658, -2659,2660,2661,2662,2663,2664,2665,2666,2667,2668,2669,2670,2671,2672,2673,2674, -2675,2676,2677,2678,2679,2680,2681,2682,2683,2684,2685,2686,2687,2688,2689,2690, -2691,2692,2693,2694,2695,2696,2697,2698,2699,1542, 880,2700,2701,2702,2703,2704, 
-2705,2706,2707,2708,2709,2710,2711,2712,2713,2714,2715,2716,2717,2718,2719,2720, -2721,2722,2723,2724,2725,1543,2726,2727,2728,2729,2730,2731,2732,1544,2733,2734, -2735,2736,2737,2738,2739,2740,2741,2742,2743,2744,2745,2746,2747,2748,2749,2750, -2751,2752,2753,2754,1545,2755,2756,2757,2758,2759,2760,2761,2762,2763,2764,2765, -2766,1546,2767,1547,2768,2769,2770,2771,2772,2773,2774,2775,2776,2777,2778,2779, -2780,2781,2782,2783,2784,2785,2786,1548,2787,2788,2789,1109,2790,2791,2792,2793, -2794,2795,2796,2797,2798,2799,2800,2801,2802,2803,2804,2805,2806,2807,2808,2809, -2810,2811,2812,1329,2813,2814,2815,2816,2817,2818,2819,2820,2821,2822,2823,2824, -2825,2826,2827,2828,2829,2830,2831,2832,2833,2834,2835,2836,2837,2838,2839,2840, -2841,2842,2843,2844,2845,2846,2847,2848,2849,2850,2851,2852,2853,2854,2855,2856, -1549,2857,2858,2859,2860,1550,2861,2862,1551,2863,2864,2865,2866,2867,2868,2869, -2870,2871,2872,2873,2874,1110,1330,2875,2876,2877,2878,2879,2880,2881,2882,2883, -2884,2885,2886,2887,2888,2889,2890,2891,2892,2893,2894,2895,2896,2897,2898,2899, -2900,2901,2902,2903,2904,2905,2906,2907,2908,2909,2910,2911,2912,2913,2914,2915, -2916,2917,2918,2919,2920,2921,2922,2923,2924,2925,2926,2927,2928,2929,2930,1331, -2931,2932,2933,2934,2935,2936,2937,2938,2939,2940,2941,2942,2943,1552,2944,2945, -2946,2947,2948,2949,2950,2951,2952,2953,2954,2955,2956,2957,2958,2959,2960,2961, -2962,2963,2964,1252,2965,2966,2967,2968,2969,2970,2971,2972,2973,2974,2975,2976, -2977,2978,2979,2980,2981,2982,2983,2984,2985,2986,2987,2988,2989,2990,2991,2992, -2993,2994,2995,2996,2997,2998,2999,3000,3001,3002,3003,3004,3005,3006,3007,3008, -3009,3010,3011,3012,1553,3013,3014,3015,3016,3017,1554,3018,1332,3019,3020,3021, -3022,3023,3024,3025,3026,3027,3028,3029,3030,3031,3032,3033,3034,3035,3036,3037, -3038,3039,3040,3041,3042,3043,3044,3045,3046,3047,3048,3049,3050,1555,3051,3052, -3053,1556,1557,3054,3055,3056,3057,3058,3059,3060,3061,3062,3063,3064,3065,3066, -3067,1558,3068,3069,3070,3071,3072,3073,3074,3075,3076,1559,3077,3078,3079,3080, -3081,3082,3083,1253,3084,3085,3086,3087,3088,3089,3090,3091,3092,3093,3094,3095, -3096,3097,3098,3099,3100,3101,3102,3103,3104,3105,3106,3107,3108,1152,3109,3110, -3111,3112,3113,1560,3114,3115,3116,3117,1111,3118,3119,3120,3121,3122,3123,3124, -3125,3126,3127,3128,3129,3130,3131,3132,3133,3134,3135,3136,3137,3138,3139,3140, -3141,3142,3143,3144,3145,3146,3147,3148,3149,3150,3151,3152,3153,3154,3155,3156, -3157,3158,3159,3160,3161,3162,3163,3164,3165,3166,3167,3168,3169,3170,3171,3172, -3173,3174,3175,3176,1333,3177,3178,3179,3180,3181,3182,3183,3184,3185,3186,3187, -3188,3189,1561,3190,3191,1334,3192,3193,3194,3195,3196,3197,3198,3199,3200,3201, -3202,3203,3204,3205,3206,3207,3208,3209,3210,3211,3212,3213,3214,3215,3216,3217, -3218,3219,3220,3221,3222,3223,3224,3225,3226,3227,3228,3229,3230,3231,3232,3233, -3234,1562,3235,3236,3237,3238,3239,3240,3241,3242,3243,3244,3245,3246,3247,3248, -3249,3250,3251,3252,3253,3254,3255,3256,3257,3258,3259,3260,3261,3262,3263,3264, -3265,3266,3267,3268,3269,3270,3271,3272,3273,3274,3275,3276,3277,1563,3278,3279, -3280,3281,3282,3283,3284,3285,3286,3287,3288,3289,3290,3291,3292,3293,3294,3295, -3296,3297,3298,3299,3300,3301,3302,3303,3304,3305,3306,3307,3308,3309,3310,3311, -3312,3313,3314,3315,3316,3317,3318,3319,3320,3321,3322,3323,3324,3325,3326,3327, -3328,3329,3330,3331,3332,3333,3334,3335,3336,3337,3338,3339,3340,3341,3342,3343, -3344,3345,3346,3347,3348,3349,3350,3351,3352,3353,3354,3355,3356,3357,3358,3359, 
-3360,3361,3362,3363,3364,1335,3365,3366,3367,3368,3369,3370,3371,3372,3373,3374, -3375,3376,3377,3378,3379,3380,3381,3382,3383,3384,3385,3386,3387,1336,3388,3389, -3390,3391,3392,3393,3394,3395,3396,3397,3398,3399,3400,3401,3402,3403,3404,3405, -3406,3407,3408,3409,3410,3411,3412,3413,3414,1337,3415,3416,3417,3418,3419,1338, -3420,3421,3422,1564,1565,3423,3424,3425,3426,3427,3428,3429,3430,3431,1254,3432, -3433,3434,1339,3435,3436,3437,3438,3439,1566,3440,3441,3442,3443,3444,3445,3446, -3447,3448,3449,3450,3451,3452,3453,3454,1255,3455,3456,3457,3458,3459,1567,1191, -3460,1568,1569,3461,3462,3463,1570,3464,3465,3466,3467,3468,1571,3469,3470,3471, -3472,3473,1572,3474,3475,3476,3477,3478,3479,3480,3481,3482,3483,3484,3485,3486, -1340,3487,3488,3489,3490,3491,3492,1021,3493,3494,3495,3496,3497,3498,1573,3499, -1341,3500,3501,3502,3503,3504,3505,3506,3507,3508,3509,3510,3511,1342,3512,3513, -3514,3515,3516,1574,1343,3517,3518,3519,1575,3520,1576,3521,3522,3523,3524,3525, -3526,3527,3528,3529,3530,3531,3532,3533,3534,3535,3536,3537,3538,3539,3540,3541, -3542,3543,3544,3545,3546,3547,3548,3549,3550,3551,3552,3553,3554,3555,3556,3557, -3558,3559,3560,3561,3562,3563,3564,3565,3566,3567,3568,3569,3570,3571,3572,3573, -3574,3575,3576,3577,3578,3579,3580,1577,3581,3582,1578,3583,3584,3585,3586,3587, -3588,3589,3590,3591,3592,3593,3594,3595,3596,3597,3598,3599,3600,3601,3602,3603, -3604,1579,3605,3606,3607,3608,3609,3610,3611,3612,3613,3614,3615,3616,3617,3618, -3619,3620,3621,3622,3623,3624,3625,3626,3627,3628,3629,1580,3630,3631,1581,3632, -3633,3634,3635,3636,3637,3638,3639,3640,3641,3642,3643,3644,3645,3646,3647,3648, -3649,3650,3651,3652,3653,3654,3655,3656,1582,3657,3658,3659,3660,3661,3662,3663, -3664,3665,3666,3667,3668,3669,3670,3671,3672,3673,3674,3675,3676,3677,3678,3679, -3680,3681,3682,3683,3684,3685,3686,3687,3688,3689,3690,3691,3692,3693,3694,3695, -3696,3697,3698,3699,3700,1192,3701,3702,3703,3704,1256,3705,3706,3707,3708,1583, -1257,3709,3710,3711,3712,3713,3714,3715,3716,1584,3717,3718,3719,3720,3721,3722, -3723,3724,3725,3726,3727,3728,3729,3730,3731,3732,3733,3734,3735,3736,3737,3738, -3739,3740,3741,3742,3743,3744,3745,1344,3746,3747,3748,3749,3750,3751,3752,3753, -3754,3755,3756,1585,3757,3758,3759,3760,3761,3762,3763,3764,3765,3766,1586,3767, -3768,3769,3770,3771,3772,3773,3774,3775,3776,3777,3778,1345,3779,3780,3781,3782, -3783,3784,3785,3786,3787,3788,3789,3790,3791,3792,3793,3794,3795,1346,1587,3796, -3797,1588,3798,3799,3800,3801,3802,3803,3804,3805,3806,1347,3807,3808,3809,3810, -3811,1589,3812,3813,3814,3815,3816,3817,3818,3819,3820,3821,1590,3822,3823,1591, -1348,3824,3825,3826,3827,3828,3829,3830,1592,3831,3832,1593,3833,3834,3835,3836, -3837,3838,3839,3840,3841,3842,3843,3844,1349,3845,3846,3847,3848,3849,3850,3851, -3852,3853,3854,3855,3856,3857,3858,1594,3859,3860,3861,3862,3863,3864,3865,3866, -3867,3868,3869,1595,3870,3871,3872,3873,1596,3874,3875,3876,3877,3878,3879,3880, -3881,3882,3883,3884,3885,3886,1597,3887,3888,3889,3890,3891,3892,3893,3894,3895, -1598,3896,3897,3898,1599,1600,3899,1350,3900,1351,3901,3902,1352,3903,3904,3905, -3906,3907,3908,3909,3910,3911,3912,3913,3914,3915,3916,3917,3918,3919,3920,3921, -3922,3923,3924,1258,3925,3926,3927,3928,3929,3930,3931,1193,3932,1601,3933,3934, -3935,3936,3937,3938,3939,3940,3941,3942,3943,1602,3944,3945,3946,3947,3948,1603, -3949,3950,3951,3952,3953,3954,3955,3956,3957,3958,3959,3960,3961,3962,3963,3964, -3965,1604,3966,3967,3968,3969,3970,3971,3972,3973,3974,3975,3976,3977,1353,3978, 
-3979,3980,3981,3982,3983,3984,3985,3986,3987,3988,3989,3990,3991,1354,3992,3993, -3994,3995,3996,3997,3998,3999,4000,4001,4002,4003,4004,4005,4006,4007,4008,4009, -4010,4011,4012,4013,4014,4015,4016,4017,4018,4019,4020,4021,4022,4023,1355,4024, -4025,4026,4027,4028,4029,4030,4031,4032,4033,4034,4035,4036,4037,4038,4039,4040, -1605,4041,4042,4043,4044,4045,4046,4047,4048,4049,4050,4051,4052,4053,4054,4055, -4056,4057,4058,4059,4060,1606,4061,4062,4063,4064,1607,4065,4066,4067,4068,4069, -4070,4071,4072,4073,4074,4075,4076,1194,4077,4078,1608,4079,4080,4081,4082,4083, -4084,4085,4086,4087,1609,4088,4089,4090,4091,4092,4093,4094,4095,4096,4097,4098, -4099,4100,4101,4102,4103,4104,4105,4106,4107,4108,1259,4109,4110,4111,4112,4113, -4114,4115,4116,4117,4118,4119,4120,4121,4122,4123,4124,1195,4125,4126,4127,1610, -4128,4129,4130,4131,4132,4133,4134,4135,4136,4137,1356,4138,4139,4140,4141,4142, -4143,4144,1611,4145,4146,4147,4148,4149,4150,4151,4152,4153,4154,4155,4156,4157, -4158,4159,4160,4161,4162,4163,4164,4165,4166,4167,4168,4169,4170,4171,4172,4173, -4174,4175,4176,4177,4178,4179,4180,4181,4182,4183,4184,4185,4186,4187,4188,4189, -4190,4191,4192,4193,4194,4195,4196,4197,4198,4199,4200,4201,4202,4203,4204,4205, -4206,4207,4208,4209,4210,4211,4212,4213,4214,4215,4216,4217,4218,4219,1612,4220, -4221,4222,4223,4224,4225,4226,4227,1357,4228,1613,4229,4230,4231,4232,4233,4234, -4235,4236,4237,4238,4239,4240,4241,4242,4243,1614,4244,4245,4246,4247,4248,4249, -4250,4251,4252,4253,4254,4255,4256,4257,4258,4259,4260,4261,4262,4263,4264,4265, -4266,4267,4268,4269,4270,1196,1358,4271,4272,4273,4274,4275,4276,4277,4278,4279, -4280,4281,4282,4283,4284,4285,4286,4287,1615,4288,4289,4290,4291,4292,4293,4294, -4295,4296,4297,4298,4299,4300,4301,4302,4303,4304,4305,4306,4307,4308,4309,4310, -4311,4312,4313,4314,4315,4316,4317,4318,4319,4320,4321,4322,4323,4324,4325,4326, -4327,4328,4329,4330,4331,4332,4333,4334,1616,4335,4336,4337,4338,4339,4340,4341, -4342,4343,4344,4345,4346,4347,4348,4349,4350,4351,4352,4353,4354,4355,4356,4357, -4358,4359,4360,1617,4361,4362,4363,4364,4365,1618,4366,4367,4368,4369,4370,4371, -4372,4373,4374,4375,4376,4377,4378,4379,4380,4381,4382,4383,4384,4385,4386,4387, -4388,4389,4390,4391,4392,4393,4394,4395,4396,4397,4398,4399,4400,4401,4402,4403, -4404,4405,4406,4407,4408,4409,4410,4411,4412,4413,4414,4415,4416,1619,4417,4418, -4419,4420,4421,4422,4423,4424,4425,1112,4426,4427,4428,4429,4430,1620,4431,4432, -4433,4434,4435,4436,4437,4438,4439,4440,4441,4442,1260,1261,4443,4444,4445,4446, -4447,4448,4449,4450,4451,4452,4453,4454,4455,1359,4456,4457,4458,4459,4460,4461, -4462,4463,4464,4465,1621,4466,4467,4468,4469,4470,4471,4472,4473,4474,4475,4476, -4477,4478,4479,4480,4481,4482,4483,4484,4485,4486,4487,4488,4489,1055,4490,4491, -4492,4493,4494,4495,4496,4497,4498,4499,4500,4501,4502,4503,4504,4505,4506,4507, -4508,4509,4510,4511,4512,4513,4514,4515,4516,4517,4518,1622,4519,4520,4521,1623, -4522,4523,4524,4525,4526,4527,4528,4529,4530,4531,4532,4533,4534,4535,1360,4536, -4537,4538,4539,4540,4541,4542,4543, 975,4544,4545,4546,4547,4548,4549,4550,4551, -4552,4553,4554,4555,4556,4557,4558,4559,4560,4561,4562,4563,4564,4565,4566,4567, -4568,4569,4570,4571,1624,4572,4573,4574,4575,4576,1625,4577,4578,4579,4580,4581, -4582,4583,4584,1626,4585,4586,4587,4588,4589,4590,4591,4592,4593,4594,4595,1627, -4596,4597,4598,4599,4600,4601,4602,4603,4604,4605,4606,4607,4608,4609,4610,4611, -4612,4613,4614,4615,1628,4616,4617,4618,4619,4620,4621,4622,4623,4624,4625,4626, 
-4627,4628,4629,4630,4631,4632,4633,4634,4635,4636,4637,4638,4639,4640,4641,4642, -4643,4644,4645,4646,4647,4648,4649,1361,4650,4651,4652,4653,4654,4655,4656,4657, -4658,4659,4660,4661,1362,4662,4663,4664,4665,4666,4667,4668,4669,4670,4671,4672, -4673,4674,4675,4676,4677,4678,4679,4680,4681,4682,1629,4683,4684,4685,4686,4687, -1630,4688,4689,4690,4691,1153,4692,4693,4694,1113,4695,4696,4697,4698,4699,4700, -4701,4702,4703,4704,4705,4706,4707,4708,4709,4710,4711,1197,4712,4713,4714,4715, -4716,4717,4718,4719,4720,4721,4722,4723,4724,4725,4726,4727,4728,4729,4730,4731, -4732,4733,4734,4735,1631,4736,1632,4737,4738,4739,4740,4741,4742,4743,4744,1633, -4745,4746,4747,4748,4749,1262,4750,4751,4752,4753,4754,1363,4755,4756,4757,4758, -4759,4760,4761,4762,4763,4764,4765,4766,4767,4768,1634,4769,4770,4771,4772,4773, -4774,4775,4776,4777,4778,1635,4779,4780,4781,4782,4783,4784,4785,4786,4787,4788, -4789,1636,4790,4791,4792,4793,4794,4795,4796,4797,4798,4799,4800,4801,4802,4803, -4804,4805,4806,1637,4807,4808,4809,1638,4810,4811,4812,4813,4814,4815,4816,4817, -4818,1639,4819,4820,4821,4822,4823,4824,4825,4826,4827,4828,4829,4830,4831,4832, -4833,1077,4834,4835,4836,4837,4838,4839,4840,4841,4842,4843,4844,4845,4846,4847, -4848,4849,4850,4851,4852,4853,4854,4855,4856,4857,4858,4859,4860,4861,4862,4863, -4864,4865,4866,4867,4868,4869,4870,4871,4872,4873,4874,4875,4876,4877,4878,4879, -4880,4881,4882,4883,1640,4884,4885,1641,4886,4887,4888,4889,4890,4891,4892,4893, -4894,4895,4896,4897,4898,4899,4900,4901,4902,4903,4904,4905,4906,4907,4908,4909, -4910,4911,1642,4912,4913,4914,1364,4915,4916,4917,4918,4919,4920,4921,4922,4923, -4924,4925,4926,4927,4928,4929,4930,4931,1643,4932,4933,4934,4935,4936,4937,4938, -4939,4940,4941,4942,4943,4944,4945,4946,4947,4948,4949,4950,4951,4952,4953,4954, -4955,4956,4957,4958,4959,4960,4961,4962,4963,4964,4965,4966,4967,4968,4969,4970, -4971,4972,4973,4974,4975,4976,4977,4978,4979,4980,1644,4981,4982,4983,4984,1645, -4985,4986,1646,4987,4988,4989,4990,4991,4992,4993,4994,4995,4996,4997,4998,4999, -5000,5001,5002,5003,5004,5005,1647,5006,1648,5007,5008,5009,5010,5011,5012,1078, -5013,5014,5015,5016,5017,5018,5019,5020,5021,5022,5023,5024,5025,5026,5027,5028, -1365,5029,5030,5031,5032,5033,5034,5035,5036,5037,5038,5039,1649,5040,5041,5042, -5043,5044,5045,1366,5046,5047,5048,5049,5050,5051,5052,5053,5054,5055,1650,5056, -5057,5058,5059,5060,5061,5062,5063,5064,5065,5066,5067,5068,5069,5070,5071,5072, -5073,5074,5075,5076,5077,1651,5078,5079,5080,5081,5082,5083,5084,5085,5086,5087, -5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102,5103, -5104,5105,5106,5107,5108,5109,5110,1652,5111,5112,5113,5114,5115,5116,5117,5118, -1367,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,1653,5130,5131,5132, -5133,5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148, -5149,1368,5150,1654,5151,1369,5152,5153,5154,5155,5156,5157,5158,5159,5160,5161, -5162,5163,5164,5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,5176,5177, -5178,1370,5179,5180,5181,5182,5183,5184,5185,5186,5187,5188,5189,5190,5191,5192, -5193,5194,5195,5196,5197,5198,1655,5199,5200,5201,5202,1656,5203,5204,5205,5206, -1371,5207,1372,5208,5209,5210,5211,1373,5212,5213,1374,5214,5215,5216,5217,5218, -5219,5220,5221,5222,5223,5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234, -5235,5236,5237,5238,5239,5240,5241,5242,5243,5244,5245,5246,5247,1657,5248,5249, -5250,5251,1658,1263,5252,5253,5254,5255,5256,1375,5257,5258,5259,5260,5261,5262, 
-5263,5264,5265,5266,5267,5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278, -5279,5280,5281,5282,5283,1659,5284,5285,5286,5287,5288,5289,5290,5291,5292,5293, -5294,5295,5296,5297,5298,5299,5300,1660,5301,5302,5303,5304,5305,5306,5307,5308, -5309,5310,5311,5312,5313,5314,5315,5316,5317,5318,5319,5320,5321,1376,5322,5323, -5324,5325,5326,5327,5328,5329,5330,5331,5332,5333,1198,5334,5335,5336,5337,5338, -5339,5340,5341,5342,5343,1661,5344,5345,5346,5347,5348,5349,5350,5351,5352,5353, -5354,5355,5356,5357,5358,5359,5360,5361,5362,5363,5364,5365,5366,5367,5368,5369, -5370,5371,5372,5373,5374,5375,5376,5377,5378,5379,5380,5381,5382,5383,5384,5385, -5386,5387,5388,5389,5390,5391,5392,5393,5394,5395,5396,5397,5398,1264,5399,5400, -5401,5402,5403,5404,5405,5406,5407,5408,5409,5410,5411,5412,1662,5413,5414,5415, -5416,1663,5417,5418,5419,5420,5421,5422,5423,5424,5425,5426,5427,5428,5429,5430, -5431,5432,5433,5434,5435,5436,5437,5438,1664,5439,5440,5441,5442,5443,5444,5445, -5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456,5457,5458,5459,5460,5461, -5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472,5473,5474,5475,5476,5477, -5478,1154,5479,5480,5481,5482,5483,5484,5485,1665,5486,5487,5488,5489,5490,5491, -5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504,5505,5506,5507, -5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520,5521,5522,5523, -5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536,5537,5538,5539, -5540,5541,5542,5543,5544,5545,5546,5547,5548,1377,5549,5550,5551,5552,5553,5554, -5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568,5569,5570, -1114,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584,5585, -5586,5587,5588,5589,5590,5591,5592,1378,5593,5594,5595,5596,5597,5598,5599,5600, -5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,1379,5615, -5616,5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631, -5632,5633,5634,1380,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646, -5647,5648,5649,1381,1056,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660, -1666,5661,5662,5663,5664,5665,5666,5667,5668,1667,5669,1668,5670,5671,5672,5673, -5674,5675,5676,5677,5678,1155,5679,5680,5681,5682,5683,5684,5685,5686,5687,5688, -5689,5690,5691,5692,5693,5694,5695,5696,5697,5698,1669,5699,5700,5701,5702,5703, -5704,5705,1670,5706,5707,5708,5709,5710,1671,5711,5712,5713,5714,1382,5715,5716, -5717,5718,5719,5720,5721,5722,5723,5724,5725,1672,5726,5727,1673,1674,5728,5729, -5730,5731,5732,5733,5734,5735,5736,1675,5737,5738,5739,5740,5741,5742,5743,5744, -1676,5745,5746,5747,5748,5749,5750,5751,1383,5752,5753,5754,5755,5756,5757,5758, -5759,5760,5761,5762,5763,5764,5765,5766,5767,5768,1677,5769,5770,5771,5772,5773, -1678,5774,5775,5776, 998,5777,5778,5779,5780,5781,5782,5783,5784,5785,1384,5786, -5787,5788,5789,5790,5791,5792,5793,5794,5795,5796,5797,5798,5799,5800,1679,5801, -5802,5803,1115,1116,5804,5805,5806,5807,5808,5809,5810,5811,5812,5813,5814,5815, -5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828,5829,5830,5831, -5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844,5845,5846,5847, -5848,5849,5850,5851,5852,5853,5854,5855,1680,5856,5857,5858,5859,5860,5861,5862, -5863,5864,1681,5865,5866,5867,1682,5868,5869,5870,5871,5872,5873,5874,5875,5876, -5877,5878,5879,1683,5880,1684,5881,5882,5883,5884,1685,5885,5886,5887,5888,5889, -5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905, 
-5906,5907,1686,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, -5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,1687, -5936,5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951, -5952,1688,1689,5953,1199,5954,5955,5956,5957,5958,5959,5960,5961,1690,5962,5963, -5964,5965,5966,5967,5968,5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979, -5980,5981,1385,5982,1386,5983,5984,5985,5986,5987,5988,5989,5990,5991,5992,5993, -5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004,6005,6006,6007,6008,6009, -6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020,6021,6022,6023,6024,6025, -6026,6027,1265,6028,6029,1691,6030,6031,6032,6033,6034,6035,6036,6037,6038,6039, -6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052,6053,6054,6055, -6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068,6069,6070,6071, -6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084,1692,6085,6086, -6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100,6101,6102, -6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116,6117,6118, -6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,1693,6132,6133, -6134,6135,6136,1694,6137,6138,6139,6140,6141,1695,6142,6143,6144,6145,6146,6147, -6148,6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163, -6164,6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179, -6180,6181,6182,6183,6184,6185,1696,6186,6187,6188,6189,6190,6191,6192,6193,6194, -6195,6196,6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210, -6211,6212,6213,6214,6215,6216,6217,6218,6219,1697,6220,6221,6222,6223,6224,6225, -6226,6227,6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241, -6242,6243,6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,1698,6254,6255,6256, -6257,6258,6259,6260,6261,6262,6263,1200,6264,6265,6266,6267,6268,6269,6270,6271, #1024 -6272,6273,6274,6275,6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,6286,6287, -6288,6289,6290,6291,6292,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,1699, -6303,6304,1700,6305,6306,6307,6308,6309,6310,6311,6312,6313,6314,6315,6316,6317, -6318,6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333, -6334,6335,6336,6337,6338,6339,1701,6340,6341,6342,6343,6344,1387,6345,6346,6347, -6348,6349,6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363, -6364,6365,6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379, -6380,6381,6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395, -6396,6397,6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,6411, -6412,6413,1702,6414,6415,6416,6417,6418,6419,6420,6421,6422,1703,6423,6424,6425, -6426,6427,6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,1704,6439,6440, -6441,6442,6443,6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,6455,6456, -6457,6458,6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472, -6473,6474,6475,6476,6477,6478,6479,6480,6481,6482,6483,6484,6485,6486,6487,6488, -6489,6490,6491,6492,6493,6494,6495,6496,6497,6498,6499,6500,6501,6502,6503,1266, -6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516,6517,6518,6519, -6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532,6533,6534,6535, -6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548,6549,6550,6551, -1705,1706,6552,6553,6554,6555,6556,6557,6558,6559,6560,6561,6562,6563,6564,6565, 
-6566,6567,6568,6569,6570,6571,6572,6573,6574,6575,6576,6577,6578,6579,6580,6581, -6582,6583,6584,6585,6586,6587,6588,6589,6590,6591,6592,6593,6594,6595,6596,6597, -6598,6599,6600,6601,6602,6603,6604,6605,6606,6607,6608,6609,6610,6611,6612,6613, -6614,6615,6616,6617,6618,6619,6620,6621,6622,6623,6624,6625,6626,6627,6628,6629, -6630,6631,6632,6633,6634,6635,6636,6637,1388,6638,6639,6640,6641,6642,6643,6644, -1707,6645,6646,6647,6648,6649,6650,6651,6652,6653,6654,6655,6656,6657,6658,6659, -6660,6661,6662,6663,1708,6664,6665,6666,6667,6668,6669,6670,6671,6672,6673,6674, -1201,6675,6676,6677,6678,6679,6680,6681,6682,6683,6684,6685,6686,6687,6688,6689, -6690,6691,6692,6693,6694,6695,6696,6697,6698,6699,6700,6701,6702,6703,6704,6705, -6706,6707,6708,6709,6710,6711,6712,6713,6714,6715,6716,6717,6718,6719,6720,6721, -6722,6723,6724,6725,1389,6726,6727,6728,6729,6730,6731,6732,6733,6734,6735,6736, -1390,1709,6737,6738,6739,6740,6741,6742,1710,6743,6744,6745,6746,1391,6747,6748, -6749,6750,6751,6752,6753,6754,6755,6756,6757,1392,6758,6759,6760,6761,6762,6763, -6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777,6778,6779, -6780,1202,6781,6782,6783,6784,6785,6786,6787,6788,6789,6790,6791,6792,6793,6794, -6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806,6807,6808,6809,1711, -6810,6811,6812,6813,6814,6815,6816,6817,6818,6819,6820,6821,6822,6823,6824,6825, -6826,6827,6828,6829,6830,6831,6832,6833,6834,6835,6836,1393,6837,6838,6839,6840, -6841,6842,6843,6844,6845,6846,6847,6848,6849,6850,6851,6852,6853,6854,6855,6856, -6857,6858,6859,6860,6861,6862,6863,6864,6865,6866,6867,6868,6869,6870,6871,6872, -6873,6874,6875,6876,6877,6878,6879,6880,6881,6882,6883,6884,6885,6886,6887,6888, -6889,6890,6891,6892,6893,6894,6895,6896,6897,6898,6899,6900,6901,6902,1712,6903, -6904,6905,6906,6907,6908,6909,6910,1713,6911,6912,6913,6914,6915,6916,6917,6918, -6919,6920,6921,6922,6923,6924,6925,6926,6927,6928,6929,6930,6931,6932,6933,6934, -6935,6936,6937,6938,6939,6940,6941,6942,6943,6944,6945,6946,6947,6948,6949,6950, -6951,6952,6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966, -6967,6968,6969,6970,6971,6972,6973,6974,1714,6975,6976,6977,6978,6979,6980,6981, -6982,6983,6984,6985,6986,6987,6988,1394,6989,6990,6991,6992,6993,6994,6995,6996, -6997,6998,6999,7000,1715,7001,7002,7003,7004,7005,7006,7007,7008,7009,7010,7011, -7012,7013,7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027, -7028,1716,7029,7030,7031,7032,7033,7034,7035,7036,7037,7038,7039,7040,7041,7042, -7043,7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058, -7059,7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,7071,7072,7073,7074, -7075,7076,7077,7078,7079,7080,7081,7082,7083,7084,7085,7086,7087,7088,7089,7090, -7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105,7106, -7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,7119,7120,7121,7122, -7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136,7137,7138, -7139,7140,7141,7142,7143,7144,7145,7146,7147,7148,7149,7150,7151,7152,7153,7154, -7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167,7168,7169,7170, -7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183,7184,7185,7186, -7187,7188,7189,7190,7191,7192,7193,7194,7195,7196,7197,7198,7199,7200,7201,7202, -7203,7204,7205,7206,7207,1395,7208,7209,7210,7211,7212,7213,1717,7214,7215,7216, -7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229,7230,7231,7232, 
-7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245,7246,7247,7248, -7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261,7262,7263,7264, -7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277,7278,7279,7280, -7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293,7294,7295,7296, -7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308,7309,7310,7311,7312, -7313,1718,7314,7315,7316,7317,7318,7319,7320,7321,7322,7323,7324,7325,7326,7327, -7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339,7340,7341,7342,7343, -7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,7354,7355,7356,7357,7358,7359, -7360,7361,7362,7363,7364,7365,7366,7367,7368,7369,7370,7371,7372,7373,7374,7375, -7376,7377,7378,7379,7380,7381,7382,7383,7384,7385,7386,7387,7388,7389,7390,7391, -7392,7393,7394,7395,7396,7397,7398,7399,7400,7401,7402,7403,7404,7405,7406,7407, -7408,7409,7410,7411,7412,7413,7414,7415,7416,7417,7418,7419,7420,7421,7422,7423, -7424,7425,7426,7427,7428,7429,7430,7431,7432,7433,7434,7435,7436,7437,7438,7439, -7440,7441,7442,7443,7444,7445,7446,7447,7448,7449,7450,7451,7452,7453,7454,7455, -7456,7457,7458,7459,7460,7461,7462,7463,7464,7465,7466,7467,7468,7469,7470,7471, -7472,7473,7474,7475,7476,7477,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487, -7488,7489,7490,7491,7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,7503, -7504,7505,7506,7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519, -7520,7521,7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535, -7536,7537,7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,7550,7551, -7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567, -7568,7569,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582,7583, -7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598,7599, -7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614,7615, -7616,7617,7618,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628,7629,7630,7631, -7632,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643,7644,7645,7646,7647, -7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659,7660,7661,7662,7663, -7664,7665,7666,7667,7668,7669,7670,7671,7672,7673,7674,7675,7676,7677,7678,7679, -7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690,7691,7692,7693,7694,7695, -7696,7697,7698,7699,7700,7701,7702,7703,7704,7705,7706,7707,7708,7709,7710,7711, -7712,7713,7714,7715,7716,7717,7718,7719,7720,7721,7722,7723,7724,7725,7726,7727, -7728,7729,7730,7731,7732,7733,7734,7735,7736,7737,7738,7739,7740,7741,7742,7743, -7744,7745,7746,7747,7748,7749,7750,7751,7752,7753,7754,7755,7756,7757,7758,7759, -7760,7761,7762,7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775, -7776,7777,7778,7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791, -7792,7793,7794,7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,7806,7807, -7808,7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823, -7824,7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839, -7840,7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855, -7856,7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871, -7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887, -7888,7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903, -7904,7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919, 
-7920,7921,7922,7923,7924,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935, -7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951, -7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967, -7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983, -7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999, -8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015, -8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031, -8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047, -8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063, -8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079, -8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095, -8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111, -8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127, -8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143, -8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159, -8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175, -8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191, -8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207, -8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223, -8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239, -8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255, -8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271, -8272,8273,8274,8275,8276,8277,8278,8279,8280,8281,8282,8283,8284,8285,8286,8287, -8288,8289,8290,8291,8292,8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303, -8304,8305,8306,8307,8308,8309,8310,8311,8312,8313,8314,8315,8316,8317,8318,8319, -8320,8321,8322,8323,8324,8325,8326,8327,8328,8329,8330,8331,8332,8333,8334,8335, -8336,8337,8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351, -8352,8353,8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,8364,8365,8366,8367, -8368,8369,8370,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382,8383, -8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398,8399, -8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,8411,8412,8413,8414,8415, -8416,8417,8418,8419,8420,8421,8422,8423,8424,8425,8426,8427,8428,8429,8430,8431, -8432,8433,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443,8444,8445,8446,8447, -8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459,8460,8461,8462,8463, -8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475,8476,8477,8478,8479, -8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490,8491,8492,8493,8494,8495, -8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506,8507,8508,8509,8510,8511, -8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522,8523,8524,8525,8526,8527, -8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538,8539,8540,8541,8542,8543, -8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554,8555,8556,8557,8558,8559, -8560,8561,8562,8563,8564,8565,8566,8567,8568,8569,8570,8571,8572,8573,8574,8575, -8576,8577,8578,8579,8580,8581,8582,8583,8584,8585,8586,8587,8588,8589,8590,8591, -8592,8593,8594,8595,8596,8597,8598,8599,8600,8601,8602,8603,8604,8605,8606,8607, 
-8608,8609,8610,8611,8612,8613,8614,8615,8616,8617,8618,8619,8620,8621,8622,8623, -8624,8625,8626,8627,8628,8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,8639, -8640,8641,8642,8643,8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655, -8656,8657,8658,8659,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671, -8672,8673,8674,8675,8676,8677,8678,8679,8680,8681,8682,8683,8684,8685,8686,8687, -8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703, -8704,8705,8706,8707,8708,8709,8710,8711,8712,8713,8714,8715,8716,8717,8718,8719, -8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,8734,8735, -8736,8737,8738,8739,8740,8741) - -# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/euckrprober.py b/awx/lib/site-packages/requests/packages/charade/euckrprober.py deleted file mode 100644 index def3e42902..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/euckrprober.py +++ /dev/null @@ -1,42 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import EUCKRDistributionAnalysis -from .mbcssm import EUCKRSMModel - - -class EUCKRProber(MultiByteCharSetProber): - def __init__(self): - MultiByteCharSetProber.__init__(self) - self._mCodingSM = CodingStateMachine(EUCKRSMModel) - self._mDistributionAnalyzer = EUCKRDistributionAnalysis() - self.reset() - - def get_charset_name(self): - return "EUC-KR" diff --git a/awx/lib/site-packages/requests/packages/charade/euctwfreq.py b/awx/lib/site-packages/requests/packages/charade/euctwfreq.py deleted file mode 100644 index 576e7504dc..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/euctwfreq.py +++ /dev/null @@ -1,428 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. 
-# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# EUCTW frequency table -# Converted from big5 work -# by Taiwan's Mandarin Promotion Council -# <http:#www.edu.tw:81/mandr/> - -# 128 --> 0.42261 -# 256 --> 0.57851 -# 512 --> 0.74851 -# 1024 --> 0.89384 -# 2048 --> 0.97583 -# -# Idea Distribution Ratio = 0.74851/(1-0.74851) =2.98 -# Random Distribution Ration = 512/(5401-512)=0.105 -# -# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR - -EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75 - -# Char to FreqOrder table , -EUCTW_TABLE_SIZE = 8102 - -EUCTWCharToFreqOrder = ( - 1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742 -3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758 -1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774 - 63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790 -3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806 -4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822 -7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838 - 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854 - 179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870 - 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886 -2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902 -1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918 -3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934 - 706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950 -1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966 -3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982 -2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998 - 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014 -3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030 -1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046 -7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062 - 266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078 -7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094 -1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110 - 32,1408,2811,1312, 329, 487,2355,2247,2708, 
784,2674, 4,3019,3314,1427,1788, # 3126 - 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142 -3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158 -3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174 - 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190 -2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206 -2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222 - 314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238 - 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254 -3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270 -1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286 -1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302 -1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318 -2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334 - 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350 -4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366 -1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382 -7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398 -2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414 - 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430 - 98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446 - 523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462 - 710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478 -7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494 - 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510 -1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526 - 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542 - 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558 -7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574 -1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590 - 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606 -3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622 -4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638 -3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654 - 279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670 - 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686 -1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702 -4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718 -3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734 -3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750 -2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 
301,3319,4335,2380, # 3766 -7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782 -3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798 -7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814 -1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830 -2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846 -1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862 - 78,3750,3751, 267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878 -1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894 -4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910 -3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926 - 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942 - 165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958 - 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974 -2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990 -7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006 -1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022 -2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038 -1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054 -1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070 -7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086 -7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102 -7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118 -3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134 -4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150 -1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166 -7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182 -2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198 -7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214 -3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230 -3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246 -7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262 -2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278 -7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294 - 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310 -4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326 -2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342 -7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358 -3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374 -2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390 -2752,2986,7490, 435,7491, 343,1108, 596, 
17,1751,4365,2235,3430,3643,7492,4366, # 4406 - 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422 -2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438 -1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454 -1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470 -2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486 -1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502 -7505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518 -7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534 -2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550 -4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566 -1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582 -7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598 - 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614 -4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630 - 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646 -2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662 - 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678 -1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694 -1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710 - 730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726 -3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742 -3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758 -1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774 -3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790 -7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806 -7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822 -1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838 -2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854 -1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870 -3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886 -2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902 -3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918 -2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934 -4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950 -4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966 -3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982 - 97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998 -3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014 - 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030 -3337,3270, 220,2557,1009,7572,3776, 
670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046 -3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062 -3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078 -1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094 -7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110 - 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126 -7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142 -1702,1226, 102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158 - 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174 -4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190 -3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206 - 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222 -2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238 -2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254 -3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270 -1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286 -4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302 -2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318 -1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334 -1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350 -2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366 -3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382 -1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398 -7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414 -1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430 -4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446 -1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462 - 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478 -1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494 -3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510 -3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526 -2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542 -1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558 -4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574 - 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590 -7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606 -2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622 -3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638 -4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654 - 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670 -7662,3349,3041,3451, 
511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686 -7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702 -1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718 -4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734 -3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750 -2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766 -3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782 -3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798 -2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814 -1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830 -4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846 -3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862 -3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878 -2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894 -4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910 -7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926 -3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942 -2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958 -3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974 -1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990 -2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006 -3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022 -4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038 -2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054 -2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070 -7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086 -1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102 -2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118 -1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134 -3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150 -4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166 -2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182 -3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198 -3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214 -2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230 -4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246 -2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262 -3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278 -4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294 -7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310 
-3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326 - 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342 -1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358 -4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374 -1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390 -4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406 -7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422 - 510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438 -7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454 -2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470 -1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486 -1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502 -3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518 - 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534 - 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550 - 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566 -3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582 -2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598 - 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614 -7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630 -1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646 -3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662 -7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678 -1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694 -7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710 -4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726 -1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742 -2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758 -2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774 -4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790 - 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806 - 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822 -3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838 -3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854 -1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870 -2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886 -7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902 -1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918 -1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934 -3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 
6950 - 919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966 -1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982 -4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998 -7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014 -2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030 -3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046 - 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 7062 -1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078 -2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094 -2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110 -7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126 -7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142 -7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158 -2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174 -2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190 -1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206 -4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222 -3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238 -3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254 -4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270 -4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286 -2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302 -2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318 -7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334 -4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350 -7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366 -2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382 -1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398 -3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414 -4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430 -2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446 - 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462 -2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478 -1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494 -2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510 -2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526 -4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542 -7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558 -1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574 -3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 
538,3065,1012, # 7590 -7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606 -1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622 -8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638 -2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654 -8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670 -2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686 -2328,3852, 533,4273,3605,2181, 617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702 -8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718 -8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734 -8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750 - 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766 -8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782 -4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798 -3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814 -8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830 -1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846 -8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862 - 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878 -1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894 - 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910 -4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926 -1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942 -4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958 -1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974 - 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990 -3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006 -4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022 -8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038 - 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054 -3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070 - 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086 -2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102 -#Everything below is of no interest for detection purpose -2515,1613,4582,8119,3312,3866,2516,8120,4058,8121,1637,4059,2466,4583,3867,8122, # 8118 -2493,3016,3734,8123,8124,2192,8125,8126,2162,8127,8128,8129,8130,8131,8132,8133, # 8134 -8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,8144,8145,8146,8147,8148,8149, # 8150 -8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,8160,8161,8162,8163,8164,8165, # 8166 -8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181, # 8182 -8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197, # 8198 -8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213, # 8214 
-8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229, # 8230 -8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245, # 8246 -8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,8256,8257,8258,8259,8260,8261, # 8262 -8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,8272,8273,8274,8275,8276,8277, # 8278 -8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,8290,8291,8292,8293, # 8294 -8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,8308,8309, # 8310 -8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322,8323,8324,8325, # 8326 -8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337,8338,8339,8340,8341, # 8342 -8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353,8354,8355,8356,8357, # 8358 -8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,8368,8369,8370,8371,8372,8373, # 8374 -8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,8384,8385,8386,8387,8388,8389, # 8390 -8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,8400,8401,8402,8403,8404,8405, # 8406 -8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,8416,8417,8418,8419,8420,8421, # 8422 -8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,8432,8433,8434,8435,8436,8437, # 8438 -8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,8448,8449,8450,8451,8452,8453, # 8454 -8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,8464,8465,8466,8467,8468,8469, # 8470 -8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,8480,8481,8482,8483,8484,8485, # 8486 -8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501, # 8502 -8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517, # 8518 -8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533, # 8534 -8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549, # 8550 -8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,8565, # 8566 -8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,8576,8577,8578,8579,8580,8581, # 8582 -8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597, # 8598 -8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,8608,8609,8610,8611,8612,8613, # 8614 -8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,8624,8625,8626,8627,8628,8629, # 8630 -8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,8640,8641,8642,8643,8644,8645, # 8646 -8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,8657,8658,8659,8660,8661, # 8662 -8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672,8673,8674,8675,8676,8677, # 8678 -8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,8688,8689,8690,8691,8692,8693, # 8694 -8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,8704,8705,8706,8707,8708,8709, # 8710 -8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,8720,8721,8722,8723,8724,8725, # 8726 -8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,8736,8737,8738,8739,8740,8741) # 8742 - -# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/euctwprober.py b/awx/lib/site-packages/requests/packages/charade/euctwprober.py deleted file mode 100644 index e601adfdc6..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/euctwprober.py +++ /dev/null @@ -1,41 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. 
All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import EUCTWDistributionAnalysis -from .mbcssm import EUCTWSMModel - -class EUCTWProber(MultiByteCharSetProber): - def __init__(self): - MultiByteCharSetProber.__init__(self) - self._mCodingSM = CodingStateMachine(EUCTWSMModel) - self._mDistributionAnalyzer = EUCTWDistributionAnalysis() - self.reset() - - def get_charset_name(self): - return "EUC-TW" diff --git a/awx/lib/site-packages/requests/packages/charade/gb2312freq.py b/awx/lib/site-packages/requests/packages/charade/gb2312freq.py deleted file mode 100644 index 1238f510fc..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/gb2312freq.py +++ /dev/null @@ -1,472 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# GB2312 most frequently used character table -# -# Char to FreqOrder table , from hz6763 - -# 512 --> 0.79 -- 0.79 -# 1024 --> 0.92 -- 0.13 -# 2048 --> 0.98 -- 0.06 -# 6768 --> 1.00 -- 0.02 -# -# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79 -# Random Distribution Ration = 512 / (3755 - 512) = 0.157 -# -# Typical Distribution Ratio about 25% of Ideal one, still much higher that RDR - -GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9 - -GB2312_TABLE_SIZE = 3760 - -GB2312CharToFreqOrder = ( -1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205, -2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842, -2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409, - 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670, -1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820, -1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585, - 152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566, -1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575, -2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853, -3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061, - 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155, -1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406, - 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816, -2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606, - 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023, -2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414, -1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513, -3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052, - 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570, -1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575, - 253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250, -2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506, -1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26, -3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835, -1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686, -2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054, -1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894, - 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105, -3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403, -3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694, - 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873, -3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940, - 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121, -1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 
110,4549,2066, 648, -3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992, -2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233, -1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157, - 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807, -1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094, -4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258, - 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478, -3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152, -3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909, - 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272, -1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221, -2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252, -1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301, -1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254, - 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070, -3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461, -3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360, -4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124, - 296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535, -3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243, -1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713, -1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071, -4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442, - 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946, - 814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257, -3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180, -1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427, - 602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781, -1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724, -2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937, - 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943, - 432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789, - 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552, -3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246, -4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451, -3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310, - 750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860, -2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297, -2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780, -2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745, - 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936, -2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032, - 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657, - 163,2167, 290,1209,1622,3378, 
550, 634,2508,2510, 695,2634,2384,2512,1476,1414, - 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976, -3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436, -2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254, -2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536, -1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238, - 18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059, -2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741, - 90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447, - 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601, -1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269, -1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894, - 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173, - 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994, -1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956, -2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437, -3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154, -2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240, -2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143, -2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634, -3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472, -1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541, -1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143, -2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312, -1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414, -3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754, -1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424, -1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302, -3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739, - 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004, -2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484, -1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739, -4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535, -1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641, -1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307, -3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573, -1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533, - 47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965, - 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99, -1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280, - 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505, -1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012, -1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039, - 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 
892,2481,1623,4077, 982, -3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530, -4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392, -3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656, -2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220, -2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766, -1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535, -3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728, -2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338, -1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627, -1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885, - 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411, -2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671, -2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162, -3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774, -4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524, -3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346, - 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040, -3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188, -2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280, -1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131, - 259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947, - 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970, -3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814, -4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557, -2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997, -1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972, -1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369, - 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376, -1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480, -3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610, - 955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128, - 642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769, -1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207, - 57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392, -1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623, - 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782, -2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650, - 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478, -2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773, -2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007, -1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323, -1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598, -2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961, - 819,1541, 142,2284, 
44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302, -1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409, -1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683, -2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191, -2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616, -3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302, -1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774, -4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147, - 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731, - 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464, -3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377, -1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315, - 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557, -3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903, -1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060, -4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261, -1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092, -2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810, -1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708, - 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658, -1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871, -3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503, - 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229, -2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112, - 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504, -1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389, -1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27, -1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542, -3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861, -2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845, -3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700, -3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469, -3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582, - 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999, -2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274, - 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020, -2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601, - 12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628, -1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31, - 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668, - 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778, -1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169, -3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667, -3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 
63,2076, 314,1881, -1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276, -1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320, -3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751, -2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432, -2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772, -1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843, -3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116, - 451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904, -4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652, -1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664, -2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770, -3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283, -3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626, -1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713, - 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333, - 391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062, -2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555, - 931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014, -1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510, - 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015, -1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459, -1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390, -1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238, -1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232, -1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624, - 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189, - 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, # last 512 -#Everything below is of no interest for detection purpose -5508,6484,3900,3414,3974,4441,4024,3537,4037,5628,5099,3633,6485,3148,6486,3636, -5509,3257,5510,5973,5445,5872,4941,4403,3174,4627,5873,6276,2286,4230,5446,5874, -5122,6102,6103,4162,5447,5123,5323,4849,6277,3980,3851,5066,4246,5774,5067,6278, -3001,2807,5695,3346,5775,5974,5158,5448,6487,5975,5976,5776,3598,6279,5696,4806, -4211,4154,6280,6488,6489,6490,6281,4212,5037,3374,4171,6491,4562,4807,4722,4827, -5977,6104,4532,4079,5159,5324,5160,4404,3858,5359,5875,3975,4288,4610,3486,4512, -5325,3893,5360,6282,6283,5560,2522,4231,5978,5186,5449,2569,3878,6284,5401,3578, -4415,6285,4656,5124,5979,2506,4247,4449,3219,3417,4334,4969,4329,6492,4576,4828, -4172,4416,4829,5402,6286,3927,3852,5361,4369,4830,4477,4867,5876,4173,6493,6105, -4657,6287,6106,5877,5450,6494,4155,4868,5451,3700,5629,4384,6288,6289,5878,3189, -4881,6107,6290,6495,4513,6496,4692,4515,4723,5100,3356,6497,6291,3810,4080,5561, -3570,4430,5980,6498,4355,5697,6499,4724,6108,6109,3764,4050,5038,5879,4093,3226, -6292,5068,5217,4693,3342,5630,3504,4831,4377,4466,4309,5698,4431,5777,6293,5778, -4272,3706,6110,5326,3752,4676,5327,4273,5403,4767,5631,6500,5699,5880,3475,5039, -6294,5562,5125,4348,4301,4482,4068,5126,4593,5700,3380,3462,5981,5563,3824,5404, 
-4970,5511,3825,4738,6295,6501,5452,4516,6111,5881,5564,6502,6296,5982,6503,4213, -4163,3454,6504,6112,4009,4450,6113,4658,6297,6114,3035,6505,6115,3995,4904,4739, -4563,4942,4110,5040,3661,3928,5362,3674,6506,5292,3612,4791,5565,4149,5983,5328, -5259,5021,4725,4577,4564,4517,4364,6298,5405,4578,5260,4594,4156,4157,5453,3592, -3491,6507,5127,5512,4709,4922,5984,5701,4726,4289,6508,4015,6116,5128,4628,3424, -4241,5779,6299,4905,6509,6510,5454,5702,5780,6300,4365,4923,3971,6511,5161,3270, -3158,5985,4100, 867,5129,5703,6117,5363,3695,3301,5513,4467,6118,6512,5455,4232, -4242,4629,6513,3959,4478,6514,5514,5329,5986,4850,5162,5566,3846,4694,6119,5456, -4869,5781,3779,6301,5704,5987,5515,4710,6302,5882,6120,4392,5364,5705,6515,6121, -6516,6517,3736,5988,5457,5989,4695,2457,5883,4551,5782,6303,6304,6305,5130,4971, -6122,5163,6123,4870,3263,5365,3150,4871,6518,6306,5783,5069,5706,3513,3498,4409, -5330,5632,5366,5458,5459,3991,5990,4502,3324,5991,5784,3696,4518,5633,4119,6519, -4630,5634,4417,5707,4832,5992,3418,6124,5993,5567,4768,5218,6520,4595,3458,5367, -6125,5635,6126,4202,6521,4740,4924,6307,3981,4069,4385,6308,3883,2675,4051,3834, -4302,4483,5568,5994,4972,4101,5368,6309,5164,5884,3922,6127,6522,6523,5261,5460, -5187,4164,5219,3538,5516,4111,3524,5995,6310,6311,5369,3181,3386,2484,5188,3464, -5569,3627,5708,6524,5406,5165,4677,4492,6312,4872,4851,5885,4468,5996,6313,5709, -5710,6128,2470,5886,6314,5293,4882,5785,3325,5461,5101,6129,5711,5786,6525,4906, -6526,6527,4418,5887,5712,4808,2907,3701,5713,5888,6528,3765,5636,5331,6529,6530, -3593,5889,3637,4943,3692,5714,5787,4925,6315,6130,5462,4405,6131,6132,6316,5262, -6531,6532,5715,3859,5716,5070,4696,5102,3929,5788,3987,4792,5997,6533,6534,3920, -4809,5000,5998,6535,2974,5370,6317,5189,5263,5717,3826,6536,3953,5001,4883,3190, -5463,5890,4973,5999,4741,6133,6134,3607,5570,6000,4711,3362,3630,4552,5041,6318, -6001,2950,2953,5637,4646,5371,4944,6002,2044,4120,3429,6319,6537,5103,4833,6538, -6539,4884,4647,3884,6003,6004,4758,3835,5220,5789,4565,5407,6540,6135,5294,4697, -4852,6320,6321,3206,4907,6541,6322,4945,6542,6136,6543,6323,6005,4631,3519,6544, -5891,6545,5464,3784,5221,6546,5571,4659,6547,6324,6137,5190,6548,3853,6549,4016, -4834,3954,6138,5332,3827,4017,3210,3546,4469,5408,5718,3505,4648,5790,5131,5638, -5791,5465,4727,4318,6325,6326,5792,4553,4010,4698,3439,4974,3638,4335,3085,6006, -5104,5042,5166,5892,5572,6327,4356,4519,5222,5573,5333,5793,5043,6550,5639,5071, -4503,6328,6139,6551,6140,3914,3901,5372,6007,5640,4728,4793,3976,3836,4885,6552, -4127,6553,4451,4102,5002,6554,3686,5105,6555,5191,5072,5295,4611,5794,5296,6556, -5893,5264,5894,4975,5466,5265,4699,4976,4370,4056,3492,5044,4886,6557,5795,4432, -4769,4357,5467,3940,4660,4290,6141,4484,4770,4661,3992,6329,4025,4662,5022,4632, -4835,4070,5297,4663,4596,5574,5132,5409,5895,6142,4504,5192,4664,5796,5896,3885, -5575,5797,5023,4810,5798,3732,5223,4712,5298,4084,5334,5468,6143,4052,4053,4336, -4977,4794,6558,5335,4908,5576,5224,4233,5024,4128,5469,5225,4873,6008,5045,4729, -4742,4633,3675,4597,6559,5897,5133,5577,5003,5641,5719,6330,6560,3017,2382,3854, -4406,4811,6331,4393,3964,4946,6561,2420,3722,6562,4926,4378,3247,1736,4442,6332, -5134,6333,5226,3996,2918,5470,4319,4003,4598,4743,4744,4485,3785,3902,5167,5004, -5373,4394,5898,6144,4874,1793,3997,6334,4085,4214,5106,5642,4909,5799,6009,4419, -4189,3330,5899,4165,4420,5299,5720,5227,3347,6145,4081,6335,2876,3930,6146,3293, -3786,3910,3998,5900,5300,5578,2840,6563,5901,5579,6147,3531,5374,6564,6565,5580, 
-4759,5375,6566,6148,3559,5643,6336,6010,5517,6337,6338,5721,5902,3873,6011,6339, -6567,5518,3868,3649,5722,6568,4771,4947,6569,6149,4812,6570,2853,5471,6340,6341, -5644,4795,6342,6012,5723,6343,5724,6013,4349,6344,3160,6150,5193,4599,4514,4493, -5168,4320,6345,4927,3666,4745,5169,5903,5005,4928,6346,5725,6014,4730,4203,5046, -4948,3395,5170,6015,4150,6016,5726,5519,6347,5047,3550,6151,6348,4197,4310,5904, -6571,5581,2965,6152,4978,3960,4291,5135,6572,5301,5727,4129,4026,5905,4853,5728, -5472,6153,6349,4533,2700,4505,5336,4678,3583,5073,2994,4486,3043,4554,5520,6350, -6017,5800,4487,6351,3931,4103,5376,6352,4011,4321,4311,4190,5136,6018,3988,3233, -4350,5906,5645,4198,6573,5107,3432,4191,3435,5582,6574,4139,5410,6353,5411,3944, -5583,5074,3198,6575,6354,4358,6576,5302,4600,5584,5194,5412,6577,6578,5585,5413, -5303,4248,5414,3879,4433,6579,4479,5025,4854,5415,6355,4760,4772,3683,2978,4700, -3797,4452,3965,3932,3721,4910,5801,6580,5195,3551,5907,3221,3471,3029,6019,3999, -5908,5909,5266,5267,3444,3023,3828,3170,4796,5646,4979,4259,6356,5647,5337,3694, -6357,5648,5338,4520,4322,5802,3031,3759,4071,6020,5586,4836,4386,5048,6581,3571, -4679,4174,4949,6154,4813,3787,3402,3822,3958,3215,3552,5268,4387,3933,4950,4359, -6021,5910,5075,3579,6358,4234,4566,5521,6359,3613,5049,6022,5911,3375,3702,3178, -4911,5339,4521,6582,6583,4395,3087,3811,5377,6023,6360,6155,4027,5171,5649,4421, -4249,2804,6584,2270,6585,4000,4235,3045,6156,5137,5729,4140,4312,3886,6361,4330, -6157,4215,6158,3500,3676,4929,4331,3713,4930,5912,4265,3776,3368,5587,4470,4855, -3038,4980,3631,6159,6160,4132,4680,6161,6362,3923,4379,5588,4255,6586,4121,6587, -6363,4649,6364,3288,4773,4774,6162,6024,6365,3543,6588,4274,3107,3737,5050,5803, -4797,4522,5589,5051,5730,3714,4887,5378,4001,4523,6163,5026,5522,4701,4175,2791, -3760,6589,5473,4224,4133,3847,4814,4815,4775,3259,5416,6590,2738,6164,6025,5304, -3733,5076,5650,4816,5590,6591,6165,6592,3934,5269,6593,3396,5340,6594,5804,3445, -3602,4042,4488,5731,5732,3525,5591,4601,5196,6166,6026,5172,3642,4612,3202,4506, -4798,6366,3818,5108,4303,5138,5139,4776,3332,4304,2915,3415,4434,5077,5109,4856, -2879,5305,4817,6595,5913,3104,3144,3903,4634,5341,3133,5110,5651,5805,6167,4057, -5592,2945,4371,5593,6596,3474,4182,6367,6597,6168,4507,4279,6598,2822,6599,4777, -4713,5594,3829,6169,3887,5417,6170,3653,5474,6368,4216,2971,5228,3790,4579,6369, -5733,6600,6601,4951,4746,4555,6602,5418,5475,6027,3400,4665,5806,6171,4799,6028, -5052,6172,3343,4800,4747,5006,6370,4556,4217,5476,4396,5229,5379,5477,3839,5914, -5652,5807,4714,3068,4635,5808,6173,5342,4192,5078,5419,5523,5734,6174,4557,6175, -4602,6371,6176,6603,5809,6372,5735,4260,3869,5111,5230,6029,5112,6177,3126,4681, -5524,5915,2706,3563,4748,3130,6178,4018,5525,6604,6605,5478,4012,4837,6606,4534, -4193,5810,4857,3615,5479,6030,4082,3697,3539,4086,5270,3662,4508,4931,5916,4912, -5811,5027,3888,6607,4397,3527,3302,3798,2775,2921,2637,3966,4122,4388,4028,4054, -1633,4858,5079,3024,5007,3982,3412,5736,6608,3426,3236,5595,3030,6179,3427,3336, -3279,3110,6373,3874,3039,5080,5917,5140,4489,3119,6374,5812,3405,4494,6031,4666, -4141,6180,4166,6032,5813,4981,6609,5081,4422,4982,4112,3915,5653,3296,3983,6375, -4266,4410,5654,6610,6181,3436,5082,6611,5380,6033,3819,5596,4535,5231,5306,5113, -6612,4952,5918,4275,3113,6613,6376,6182,6183,5814,3073,4731,4838,5008,3831,6614, -4888,3090,3848,4280,5526,5232,3014,5655,5009,5737,5420,5527,6615,5815,5343,5173, -5381,4818,6616,3151,4953,6617,5738,2796,3204,4360,2989,4281,5739,5174,5421,5197, 
-3132,5141,3849,5142,5528,5083,3799,3904,4839,5480,2880,4495,3448,6377,6184,5271, -5919,3771,3193,6034,6035,5920,5010,6036,5597,6037,6378,6038,3106,5422,6618,5423, -5424,4142,6619,4889,5084,4890,4313,5740,6620,3437,5175,5307,5816,4199,5198,5529, -5817,5199,5656,4913,5028,5344,3850,6185,2955,5272,5011,5818,4567,4580,5029,5921, -3616,5233,6621,6622,6186,4176,6039,6379,6380,3352,5200,5273,2908,5598,5234,3837, -5308,6623,6624,5819,4496,4323,5309,5201,6625,6626,4983,3194,3838,4167,5530,5922, -5274,6381,6382,3860,3861,5599,3333,4292,4509,6383,3553,5481,5820,5531,4778,6187, -3955,3956,4324,4389,4218,3945,4325,3397,2681,5923,4779,5085,4019,5482,4891,5382, -5383,6040,4682,3425,5275,4094,6627,5310,3015,5483,5657,4398,5924,3168,4819,6628, -5925,6629,5532,4932,4613,6041,6630,4636,6384,4780,4204,5658,4423,5821,3989,4683, -5822,6385,4954,6631,5345,6188,5425,5012,5384,3894,6386,4490,4104,6632,5741,5053, -6633,5823,5926,5659,5660,5927,6634,5235,5742,5824,4840,4933,4820,6387,4859,5928, -4955,6388,4143,3584,5825,5346,5013,6635,5661,6389,5014,5484,5743,4337,5176,5662, -6390,2836,6391,3268,6392,6636,6042,5236,6637,4158,6638,5744,5663,4471,5347,3663, -4123,5143,4293,3895,6639,6640,5311,5929,5826,3800,6189,6393,6190,5664,5348,3554, -3594,4749,4603,6641,5385,4801,6043,5827,4183,6642,5312,5426,4761,6394,5665,6191, -4715,2669,6643,6644,5533,3185,5427,5086,5930,5931,5386,6192,6044,6645,4781,4013, -5745,4282,4435,5534,4390,4267,6045,5746,4984,6046,2743,6193,3501,4087,5485,5932, -5428,4184,4095,5747,4061,5054,3058,3862,5933,5600,6646,5144,3618,6395,3131,5055, -5313,6396,4650,4956,3855,6194,3896,5202,4985,4029,4225,6195,6647,5828,5486,5829, -3589,3002,6648,6397,4782,5276,6649,6196,6650,4105,3803,4043,5237,5830,6398,4096, -3643,6399,3528,6651,4453,3315,4637,6652,3984,6197,5535,3182,3339,6653,3096,2660, -6400,6654,3449,5934,4250,4236,6047,6401,5831,6655,5487,3753,4062,5832,6198,6199, -6656,3766,6657,3403,4667,6048,6658,4338,2897,5833,3880,2797,3780,4326,6659,5748, -5015,6660,5387,4351,5601,4411,6661,3654,4424,5935,4339,4072,5277,4568,5536,6402, -6662,5238,6663,5349,5203,6200,5204,6201,5145,4536,5016,5056,4762,5834,4399,4957, -6202,6403,5666,5749,6664,4340,6665,5936,5177,5667,6666,6667,3459,4668,6404,6668, -6669,4543,6203,6670,4276,6405,4480,5537,6671,4614,5205,5668,6672,3348,2193,4763, -6406,6204,5937,5602,4177,5669,3419,6673,4020,6205,4443,4569,5388,3715,3639,6407, -6049,4058,6206,6674,5938,4544,6050,4185,4294,4841,4651,4615,5488,6207,6408,6051, -5178,3241,3509,5835,6208,4958,5836,4341,5489,5278,6209,2823,5538,5350,5206,5429, -6675,4638,4875,4073,3516,4684,4914,4860,5939,5603,5389,6052,5057,3237,5490,3791, -6676,6409,6677,4821,4915,4106,5351,5058,4243,5539,4244,5604,4842,4916,5239,3028, -3716,5837,5114,5605,5390,5940,5430,6210,4332,6678,5540,4732,3667,3840,6053,4305, -3408,5670,5541,6410,2744,5240,5750,6679,3234,5606,6680,5607,5671,3608,4283,4159, -4400,5352,4783,6681,6411,6682,4491,4802,6211,6412,5941,6413,6414,5542,5751,6683, -4669,3734,5942,6684,6415,5943,5059,3328,4670,4144,4268,6685,6686,6687,6688,4372, -3603,6689,5944,5491,4373,3440,6416,5543,4784,4822,5608,3792,4616,5838,5672,3514, -5391,6417,4892,6690,4639,6691,6054,5673,5839,6055,6692,6056,5392,6212,4038,5544, -5674,4497,6057,6693,5840,4284,5675,4021,4545,5609,6418,4454,6419,6213,4113,4472, -5314,3738,5087,5279,4074,5610,4959,4063,3179,4750,6058,6420,6214,3476,4498,4716, -5431,4960,4685,6215,5241,6694,6421,6216,6695,5841,5945,6422,3748,5946,5179,3905, -5752,5545,5947,4374,6217,4455,6423,4412,6218,4803,5353,6696,3832,5280,6219,4327, 
-4702,6220,6221,6059,4652,5432,6424,3749,4751,6425,5753,4986,5393,4917,5948,5030, -5754,4861,4733,6426,4703,6697,6222,4671,5949,4546,4961,5180,6223,5031,3316,5281, -6698,4862,4295,4934,5207,3644,6427,5842,5950,6428,6429,4570,5843,5282,6430,6224, -5088,3239,6060,6699,5844,5755,6061,6431,2701,5546,6432,5115,5676,4039,3993,3327, -4752,4425,5315,6433,3941,6434,5677,4617,4604,3074,4581,6225,5433,6435,6226,6062, -4823,5756,5116,6227,3717,5678,4717,5845,6436,5679,5846,6063,5847,6064,3977,3354, -6437,3863,5117,6228,5547,5394,4499,4524,6229,4605,6230,4306,4500,6700,5951,6065, -3693,5952,5089,4366,4918,6701,6231,5548,6232,6702,6438,4704,5434,6703,6704,5953, -4168,6705,5680,3420,6706,5242,4407,6066,3812,5757,5090,5954,4672,4525,3481,5681, -4618,5395,5354,5316,5955,6439,4962,6707,4526,6440,3465,4673,6067,6441,5682,6708, -5435,5492,5758,5683,4619,4571,4674,4804,4893,4686,5493,4753,6233,6068,4269,6442, -6234,5032,4705,5146,5243,5208,5848,6235,6443,4963,5033,4640,4226,6236,5849,3387, -6444,6445,4436,4437,5850,4843,5494,4785,4894,6709,4361,6710,5091,5956,3331,6237, -4987,5549,6069,6711,4342,3517,4473,5317,6070,6712,6071,4706,6446,5017,5355,6713, -6714,4988,5436,6447,4734,5759,6715,4735,4547,4456,4754,6448,5851,6449,6450,3547, -5852,5318,6451,6452,5092,4205,6716,6238,4620,4219,5611,6239,6072,4481,5760,5957, -5958,4059,6240,6453,4227,4537,6241,5761,4030,4186,5244,5209,3761,4457,4876,3337, -5495,5181,6242,5959,5319,5612,5684,5853,3493,5854,6073,4169,5613,5147,4895,6074, -5210,6717,5182,6718,3830,6243,2798,3841,6075,6244,5855,5614,3604,4606,5496,5685, -5118,5356,6719,6454,5960,5357,5961,6720,4145,3935,4621,5119,5962,4261,6721,6455, -4786,5963,4375,4582,6245,6246,6247,6076,5437,4877,5856,3376,4380,6248,4160,6722, -5148,6456,5211,6457,6723,4718,6458,6724,6249,5358,4044,3297,6459,6250,5857,5615, -5497,5245,6460,5498,6725,6251,6252,5550,3793,5499,2959,5396,6461,6462,4572,5093, -5500,5964,3806,4146,6463,4426,5762,5858,6077,6253,4755,3967,4220,5965,6254,4989, -5501,6464,4352,6726,6078,4764,2290,5246,3906,5438,5283,3767,4964,2861,5763,5094, -6255,6256,4622,5616,5859,5860,4707,6727,4285,4708,4824,5617,6257,5551,4787,5212, -4965,4935,4687,6465,6728,6466,5686,6079,3494,4413,2995,5247,5966,5618,6729,5967, -5764,5765,5687,5502,6730,6731,6080,5397,6467,4990,6258,6732,4538,5060,5619,6733, -4719,5688,5439,5018,5149,5284,5503,6734,6081,4607,6259,5120,3645,5861,4583,6260, -4584,4675,5620,4098,5440,6261,4863,2379,3306,4585,5552,5689,4586,5285,6735,4864, -6736,5286,6082,6737,4623,3010,4788,4381,4558,5621,4587,4896,3698,3161,5248,4353, -4045,6262,3754,5183,4588,6738,6263,6739,6740,5622,3936,6741,6468,6742,6264,5095, -6469,4991,5968,6743,4992,6744,6083,4897,6745,4256,5766,4307,3108,3968,4444,5287, -3889,4343,6084,4510,6085,4559,6086,4898,5969,6746,5623,5061,4919,5249,5250,5504, -5441,6265,5320,4878,3242,5862,5251,3428,6087,6747,4237,5624,5442,6266,5553,4539, -6748,2585,3533,5398,4262,6088,5150,4736,4438,6089,6267,5505,4966,6749,6268,6750, -6269,5288,5554,3650,6090,6091,4624,6092,5690,6751,5863,4270,5691,4277,5555,5864, -6752,5692,4720,4865,6470,5151,4688,4825,6753,3094,6754,6471,3235,4653,6755,5213, -5399,6756,3201,4589,5865,4967,6472,5866,6473,5019,3016,6757,5321,4756,3957,4573, -6093,4993,5767,4721,6474,6758,5625,6759,4458,6475,6270,6760,5556,4994,5214,5252, -6271,3875,5768,6094,5034,5506,4376,5769,6761,2120,6476,5253,5770,6762,5771,5970, -3990,5971,5557,5558,5772,6477,6095,2787,4641,5972,5121,6096,6097,6272,6763,3703, -5867,5507,6273,4206,6274,4789,6098,6764,3619,3646,3833,3804,2394,3788,4936,3978, 
-4866,4899,6099,6100,5559,6478,6765,3599,5868,6101,5869,5870,6275,6766,4527,6767) - -# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/gb2312prober.py b/awx/lib/site-packages/requests/packages/charade/gb2312prober.py deleted file mode 100644 index 643fe2519e..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/gb2312prober.py +++ /dev/null @@ -1,41 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import GB2312DistributionAnalysis -from .mbcssm import GB2312SMModel - -class GB2312Prober(MultiByteCharSetProber): - def __init__(self): - MultiByteCharSetProber.__init__(self) - self._mCodingSM = CodingStateMachine(GB2312SMModel) - self._mDistributionAnalyzer = GB2312DistributionAnalysis() - self.reset() - - def get_charset_name(self): - return "GB2312" diff --git a/awx/lib/site-packages/requests/packages/charade/hebrewprober.py b/awx/lib/site-packages/requests/packages/charade/hebrewprober.py deleted file mode 100644 index 90d171f302..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/hebrewprober.py +++ /dev/null @@ -1,283 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Shy Shalom -# Portions created by the Initial Developer are Copyright (C) 2005 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301 USA
-######################### END LICENSE BLOCK #########################
-
-from .charsetprober import CharSetProber
-from .constants import eNotMe, eDetecting
-from .compat import wrap_ord
-
-# This prober doesn't actually recognize a language or a charset.
-# It is a helper prober for the use of the Hebrew model probers.
-
-### General ideas of the Hebrew charset recognition ###
-#
-# Four main charsets exist in Hebrew:
-# "ISO-8859-8" - Visual Hebrew
-# "windows-1255" - Logical Hebrew
-# "ISO-8859-8-I" - Logical Hebrew
-# "x-mac-hebrew" - ?? Logical Hebrew ??
-#
-# Both "ISO" charsets use a completely identical set of code points, whereas
-# "windows-1255" and "x-mac-hebrew" are two different proper supersets of
-# these code points. windows-1255 defines additional characters in the range
-# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific
-# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6.
-# x-mac-hebrew defines similar additional code points but with a different
-# mapping.
-#
-# As far as an average Hebrew text with no diacritics is concerned, all four
-# charsets are identical with respect to code points, meaning that for the
-# main Hebrew alphabet all four map the same values to all 27 Hebrew letters
-# (including final letters).
-#
-# The dominant difference between these charsets is their directionality.
-# "Visual" directionality means that the text is ordered as if the renderer
-# is not aware of a BIDI rendering algorithm. The renderer sees the text and
-# draws it from left to right. The text itself, when ordered naturally, is
-# read backwards. A buffer of Visual Hebrew generally looks like this:
-# "[last word of first line spelled backwards] [whole line ordered backwards
-# and spelled backwards] [first word of first line spelled backwards]
-# [end of line] [last word of second line] ... etc. "
-# Adding punctuation marks, numbers and English text to visual text is
-# naturally also "visual" and from left to right.
-#
-# "Logical" directionality means the text is ordered "naturally" according to
-# the order in which it is read. It is the responsibility of the renderer to
-# display the text from right to left. A BIDI algorithm is used to place
-# general punctuation marks, numbers and English text in the text.
-#
-# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
-# what little evidence I could find, it seems that its general directionality
-# is Logical.
-#
-# To sum up all of the above, the Hebrew probing mechanism knows about two
-# charsets:
-# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
-#    backwards while line order is natural. For charset recognition purposes
-#    the line order is unimportant (in fact, for this implementation, even
-#    word order is unimportant).
-# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
-#
-# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
-# specifically identified.
-# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
-# that contains special punctuation marks or diacritics is displayed with
-# some unconverted characters showing as question marks. This problem might
-# be corrected using another model prober for x-mac-hebrew. Due to the fact
-# that x-mac-hebrew texts are so rare, writing another model prober isn't
-# worth the effort and performance hit.
-#
-#### The Prober ####
-#
-# The prober is divided between two SBCharSetProbers and a HebrewProber,
-# all of which are managed, created, fed data, queried and deleted by the
-# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
-# fact some kind of Hebrew, Logical or Visual. The final decision about
-# which one it is rests with the HebrewProber, which combines final-letter
-# scores with the scores of the two SBCharSetProbers to produce a final
-# answer.
-#
-# The SBCSGroupProber is responsible for stripping the original text of HTML
-# tags, English characters, numbers, low-ASCII punctuation characters, spaces
-# and new lines. It reduces any sequence of such characters to a single space.
-# The buffer fed to each prober in the SBCS group prober is pure text in
-# high-ASCII.
-# The two SBCharSetProbers (model probers) share the same language model:
-# Win1255Model.
-# The first SBCharSetProber uses the model normally as any other
-# SBCharSetProber does, to recognize windows-1255, upon which this model was
-# built. The second SBCharSetProber is told to make the pair-of-letter
-# lookup in the language model backwards. This in practice exactly simulates
-# a visual Hebrew model using the windows-1255 logical Hebrew model.
-#
-# The HebrewProber does not use any language model. All it does is look for
-# final-letter evidence suggesting the text is either logical Hebrew or
-# visual Hebrew. Separated from the model probers, the results of the
-# HebrewProber alone are meaningless. The HebrewProber always returns 0.00
-# as confidence since it never identifies a charset by itself. Instead, the
-# pointer to the HebrewProber is passed to the model probers as a helper
-# "Name Prober".
-# When the Group prober receives a positive identification from any prober,
-# it asks for the name of the charset identified. If the prober queried is a
-# Hebrew model prober, the model prober forwards the call to the
-# HebrewProber to make the final decision. In the HebrewProber, the
-# decision is made according to the accumulated final-letter scores and both
-# model probers' scores. The answer is returned in the form of the name of
-# the charset identified, either "windows-1255" or "ISO-8859-8".
-
-# windows-1255 / ISO-8859-8 code points of interest
-FINAL_KAF = 0xea
-NORMAL_KAF = 0xeb
-FINAL_MEM = 0xed
-NORMAL_MEM = 0xee
-FINAL_NUN = 0xef
-NORMAL_NUN = 0xf0
-FINAL_PE = 0xf3
-NORMAL_PE = 0xf4
-FINAL_TSADI = 0xf5
-NORMAL_TSADI = 0xf6
-
-# Minimum Visual vs Logical final letter score difference.
-# If the difference is below this, don't rely solely on the final letter
-# score distance.
-MIN_FINAL_CHAR_DISTANCE = 5
-
-# Minimum Visual vs Logical model score difference.
-# If the difference is below this, don't rely at all on the model score
-# distance.
-MIN_MODEL_DISTANCE = 0.01
-
-VISUAL_HEBREW_NAME = "ISO-8859-8"
-LOGICAL_HEBREW_NAME = "windows-1255"
-
-
-class HebrewProber(CharSetProber):
-    def __init__(self):
-        CharSetProber.__init__(self)
-        self._mLogicalProber = None
-        self._mVisualProber = None
-        self.reset()
-
-    def reset(self):
-        self._mFinalCharLogicalScore = 0
-        self._mFinalCharVisualScore = 0
-        # The last two characters seen in the previous buffer,
-        # mPrev and mBeforePrev, are initialized to space in order to
-        # simulate a word delimiter at the beginning of the data.
-        self._mPrev = ' '
-        self._mBeforePrev = ' '
-        # These probers are owned by the group prober.
-
-    def set_model_probers(self, logicalProber, visualProber):
-        self._mLogicalProber = logicalProber
-        self._mVisualProber = visualProber
-
-    def is_final(self, c):
-        return wrap_ord(c) in [FINAL_KAF, FINAL_MEM, FINAL_NUN, FINAL_PE,
-                               FINAL_TSADI]
-
-    def is_non_final(self, c):
-        # The normal Tsadi is not a good Non-Final letter due to words like
-        # 'lechotet' (to chat) containing an apostrophe after the tsadi. This
-        # apostrophe is converted to a space in FilterWithoutEnglishLetters,
-        # causing the Non-Final tsadi to appear at the end of a word even
-        # though this is not the case in the original text.
-        # The letters Pe and Kaf rarely display a related behavior of not
-        # being good Non-Final letters. Words like 'Pop', 'Winamp' and
-        # 'Mubarak', for example, legally end with a Non-Final Pe or Kaf.
-        # However, the benefit of these letters as Non-Final letters
-        # outweighs the damage since these words are quite rare.
-        return wrap_ord(c) in [NORMAL_KAF, NORMAL_MEM, NORMAL_NUN, NORMAL_PE]
-
-    def feed(self, aBuf):
-        # Final letter analysis for logical-visual decision.
-        # Look for evidence that the received buffer is either logical Hebrew
-        # or visual Hebrew.
-        # The following cases are checked:
-        # 1) A word longer than 1 letter, ending with a final letter. This is
-        #    an indication that the text is laid out "naturally" since the
-        #    final letter really appears at the end. +1 for logical score.
-        # 2) A word longer than 1 letter, ending with a Non-Final letter. In
-        #    normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi
-        #    should not end with the Non-Final form of that letter. Exceptions
-        #    to this rule are mentioned above in is_non_final(). This is an
-        #    indication that the text is laid out backwards. +1 for visual
-        #    score.
-        # 3) A word longer than 1 letter, starting with a final letter. Final
-        #    letters should not appear at the beginning of a word. This is an
-        #    indication that the text is laid out backwards. +1 for visual
-        #    score.
-        #
-        # The visual score and logical score are accumulated throughout the
-        # text and are finally checked against each other in
-        # get_charset_name().
-        # No checking for final letters in the middle of words is done since
-        # that case is not an indication for either Logical or Visual text.
-        #
-        # We automatically filter out all 7-bit characters (replace them with
-        # spaces) so the word boundary detection works properly. [MAP]
-
-        if self.get_state() == eNotMe:
-            # Both model probers say it's not them. No reason to continue.
-            return eNotMe
-
-        aBuf = self.filter_high_bit_only(aBuf)
-
-        for cur in aBuf:
-            if cur == ' ':
-                # We stand on a space - a word just ended
-                if self._mBeforePrev != ' ':
-                    # next-to-last char was not a space so self._mPrev is not
-                    # a 1 letter word
                    if self.is_final(self._mPrev):
-                        # case (1) [-2:not space][-1:final letter][cur:space]
-                        self._mFinalCharLogicalScore += 1
-                    elif self.is_non_final(self._mPrev):
-                        # case (2) [-2:not space][-1:Non-Final letter][
-                        # cur:space]
-                        self._mFinalCharVisualScore += 1
-            else:
-                # Not standing on a space
-                if ((self._mBeforePrev == ' ') and
-                        (self.is_final(self._mPrev)) and (cur != ' ')):
-                    # case (3) [-2:space][-1:final letter][cur:not space]
-                    self._mFinalCharVisualScore += 1
-            self._mBeforePrev = self._mPrev
-            self._mPrev = cur
-
-        # Keep detecting until the end of the data, or until both model
-        # probers return eNotMe (handled above).
-        return eDetecting
-
-    def get_charset_name(self):
-        # Make the decision: is it Logical or Visual?
-        # If the final letter score distance is dominant enough, rely on it.
-        finalsub = self._mFinalCharLogicalScore - self._mFinalCharVisualScore
-        if finalsub >= MIN_FINAL_CHAR_DISTANCE:
-            return LOGICAL_HEBREW_NAME
-        if finalsub <= -MIN_FINAL_CHAR_DISTANCE:
-            return VISUAL_HEBREW_NAME
-
-        # It's not dominant enough, try to rely on the model scores instead.
-        modelsub = (self._mLogicalProber.get_confidence()
-                    - self._mVisualProber.get_confidence())
-        if modelsub > MIN_MODEL_DISTANCE:
-            return LOGICAL_HEBREW_NAME
-        if modelsub < -MIN_MODEL_DISTANCE:
-            return VISUAL_HEBREW_NAME
-
-        # Still no good, back to final letter distance, maybe it'll save the
-        # day.
-        if finalsub < 0.0:
-            return VISUAL_HEBREW_NAME
-
-        # (finalsub > 0 - Logical) or (don't know what to do) default to
-        # Logical.
-        return LOGICAL_HEBREW_NAME
-
-    def get_state(self):
-        # Remain active as long as any of the model probers are active.
-        if (self._mLogicalProber.get_state() == eNotMe) and \
-           (self._mVisualProber.get_state() == eNotMe):
-            return eNotMe
-        return eDetecting
diff --git a/awx/lib/site-packages/requests/packages/charade/jisfreq.py b/awx/lib/site-packages/requests/packages/charade/jisfreq.py
deleted file mode 100644
index 064345b086..0000000000
--- a/awx/lib/site-packages/requests/packages/charade/jisfreq.py
+++ /dev/null
@@ -1,569 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# Sampling from about 20M text materials include literature and computer technology -# -# Japanese frequency table, applied to both S-JIS and EUC-JP -# They are sorted in order. - -# 128 --> 0.77094 -# 256 --> 0.85710 -# 512 --> 0.92635 -# 1024 --> 0.97130 -# 2048 --> 0.99431 -# -# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58 -# Random Distribution Ration = 512 / (2965+62+83+86-512) = 0.191 -# -# Typical Distribution Ratio, 25% of IDR - -JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0 - -# Char to FreqOrder table , -JIS_TABLE_SIZE = 4368 - -JISCharToFreqOrder = ( - 40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16 -3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32 -1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48 -2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64 -2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80 -5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96 -1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112 -5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128 -5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144 -5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160 -5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176 -5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192 -5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208 -1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224 -1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240 -1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256 -2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272 -3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288 -3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304 - 4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320 - 12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336 -1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352 - 109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368 -5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384 - 271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400 - 32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416 - 43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432 - 280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448 - 54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464 -5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480 -5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496 
-5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512 -4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528 -5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544 -5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560 -5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576 -5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592 -5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608 -5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624 -5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640 -5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656 -5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672 -3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688 -5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704 -5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720 -5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736 -5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752 -5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768 -5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784 -5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800 -5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816 -5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832 -5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848 -5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864 -5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880 -5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896 -5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912 -5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928 -5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944 -5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960 -5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976 -5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992 -5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008 -5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024 -5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040 -5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056 -5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072 -5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088 -5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104 -5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120 -5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136 
-5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152 -5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168 -5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184 -5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200 -5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216 -5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232 -5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248 -5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264 -5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280 -5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296 -6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312 -6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328 -6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344 -6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360 -6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376 -6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392 -6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408 -6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424 -4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440 - 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456 - 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472 -1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488 -1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504 - 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520 -3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536 -3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552 - 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568 -3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584 -3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600 - 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616 -2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632 - 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648 -3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664 -1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680 - 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696 -1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712 - 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728 -2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744 -2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760 -2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 
1776 -2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792 -1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808 -1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824 -1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840 -1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856 -2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 1872 -1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888 -2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904 -1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920 -1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936 -1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952 -1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968 -1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984 -1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000 - 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016 - 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032 -1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048 -2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064 -2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080 -2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096 -3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112 -3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128 - 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144 -3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160 -1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176 - 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192 -2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208 -1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224 - 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240 -3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256 -4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272 -2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288 -1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304 -2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320 -1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336 - 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352 - 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368 -1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384 -2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400 -2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, 
# 2416 -2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432 -3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448 -1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464 -2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480 - 359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496 - 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, # 2512 - 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528 -1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544 -2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560 - 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576 -1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592 -1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608 - 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624 -1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640 -1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656 -1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672 - 764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688 -2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704 - 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720 -2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736 -3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752 -2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768 -1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784 -6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800 -1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816 -2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832 -1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848 - 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864 - 72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880 -3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896 -3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912 -1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928 -1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944 -1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960 -1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976 - 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992 - 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008 -2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024 - 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040 -3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 
3056 -2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072 - 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088 -1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104 -2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120 - 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136 -1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, # 3152 - 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168 -4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184 -2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200 -1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216 - 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232 -1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248 -2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264 - 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280 -6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296 -1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312 -1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328 -2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344 -3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360 - 914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376 -3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392 -1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408 - 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424 -1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440 - 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456 -3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472 - 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488 -2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504 - 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520 -4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536 -2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552 -1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568 -1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584 -1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600 - 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616 -1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632 -3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648 -1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664 -3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680 - 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 
532, # 3696 - 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712 - 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728 -2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744 -1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760 - 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776 -1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 904,3618,3537, # 3792 - 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808 -1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824 - 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840 - 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856 - 480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872 -1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888 -1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904 -2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920 -4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936 - 227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952 -1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968 - 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984 -1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000 -3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016 -1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032 -2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048 -2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064 -1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080 -1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096 -2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112 - 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128 -2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144 -1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160 -1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176 -1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192 -1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208 -3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224 -2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240 -2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256 - 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272 -3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288 -3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304 -1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320 -2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 
891,4363, # 4336 -1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352 -2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512 -#Everything below is of no interest for detection purpose -2138,2122,3730,2888,1995,1820,1044,6190,6191,6192,6193,6194,6195,6196,6197,6198, # 4384 -6199,6200,6201,6202,6203,6204,6205,4670,6206,6207,6208,6209,6210,6211,6212,6213, # 4400 -6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,6224,6225,6226,6227,6228,6229, # 4416 -6230,6231,6232,6233,6234,6235,6236,6237,3187,6238,6239,3969,6240,6241,6242,6243, # 4432 -6244,4671,6245,6246,4672,6247,6248,4133,6249,6250,4364,6251,2923,2556,2613,4673, # 4448 -4365,3970,6252,6253,6254,6255,4674,6256,6257,6258,2768,2353,4366,4675,4676,3188, # 4464 -4367,3463,6259,4134,4677,4678,6260,2267,6261,3842,3332,4368,3543,6262,6263,6264, # 4480 -3013,1954,1928,4135,4679,6265,6266,2478,3091,6267,4680,4369,6268,6269,1699,6270, # 4496 -3544,4136,4681,6271,4137,6272,4370,2804,6273,6274,2593,3971,3972,4682,6275,2236, # 4512 -4683,6276,6277,4684,6278,6279,4138,3973,4685,6280,6281,3258,6282,6283,6284,6285, # 4528 -3974,4686,2841,3975,6286,6287,3545,6288,6289,4139,4687,4140,6290,4141,6291,4142, # 4544 -6292,6293,3333,6294,6295,6296,4371,6297,3399,6298,6299,4372,3976,6300,6301,6302, # 4560 -4373,6303,6304,3843,3731,6305,4688,4374,6306,6307,3259,2294,6308,3732,2530,4143, # 4576 -6309,4689,6310,6311,6312,3048,6313,6314,4690,3733,2237,6315,6316,2282,3334,6317, # 4592 -6318,3844,6319,6320,4691,6321,3400,4692,6322,4693,6323,3049,6324,4375,6325,3977, # 4608 -6326,6327,6328,3546,6329,4694,3335,6330,4695,4696,6331,6332,6333,6334,4376,3978, # 4624 -6335,4697,3979,4144,6336,3980,4698,6337,6338,6339,6340,6341,4699,4700,4701,6342, # 4640 -6343,4702,6344,6345,4703,6346,6347,4704,6348,4705,4706,3135,6349,4707,6350,4708, # 4656 -6351,4377,6352,4709,3734,4145,6353,2506,4710,3189,6354,3050,4711,3981,6355,3547, # 4672 -3014,4146,4378,3735,2651,3845,3260,3136,2224,1986,6356,3401,6357,4712,2594,3627, # 4688 -3137,2573,3736,3982,4713,3628,4714,4715,2682,3629,4716,6358,3630,4379,3631,6359, # 4704 -6360,6361,3983,6362,6363,6364,6365,4147,3846,4717,6366,6367,3737,2842,6368,4718, # 4720 -2628,6369,3261,6370,2386,6371,6372,3738,3984,4719,3464,4720,3402,6373,2924,3336, # 4736 -4148,2866,6374,2805,3262,4380,2704,2069,2531,3138,2806,2984,6375,2769,6376,4721, # 4752 -4722,3403,6377,6378,3548,6379,6380,2705,3092,1979,4149,2629,3337,2889,6381,3338, # 4768 -4150,2557,3339,4381,6382,3190,3263,3739,6383,4151,4723,4152,2558,2574,3404,3191, # 4784 -6384,6385,4153,6386,4724,4382,6387,6388,4383,6389,6390,4154,6391,4725,3985,6392, # 4800 -3847,4155,6393,6394,6395,6396,6397,3465,6398,4384,6399,6400,6401,6402,6403,6404, # 4816 -4156,6405,6406,6407,6408,2123,6409,6410,2326,3192,4726,6411,6412,6413,6414,4385, # 4832 -4157,6415,6416,4158,6417,3093,3848,6418,3986,6419,6420,3849,6421,6422,6423,4159, # 4848 -6424,6425,4160,6426,3740,6427,6428,6429,6430,3987,6431,4727,6432,2238,6433,6434, # 4864 -4386,3988,6435,6436,3632,6437,6438,2843,6439,6440,6441,6442,3633,6443,2958,6444, # 4880 -6445,3466,6446,2364,4387,3850,6447,4388,2959,3340,6448,3851,6449,4728,6450,6451, # 4896 -3264,4729,6452,3193,6453,4389,4390,2706,3341,4730,6454,3139,6455,3194,6456,3051, # 4912 -2124,3852,1602,4391,4161,3853,1158,3854,4162,3989,4392,3990,4731,4732,4393,2040, # 4928 -4163,4394,3265,6457,2807,3467,3855,6458,6459,6460,3991,3468,4733,4734,6461,3140, # 4944 -2960,6462,4735,6463,6464,6465,6466,4736,4737,4738,4739,6467,6468,4164,2403,3856, # 
4960 -6469,6470,2770,2844,6471,4740,6472,6473,6474,6475,6476,6477,6478,3195,6479,4741, # 4976 -4395,6480,2867,6481,4742,2808,6482,2493,4165,6483,6484,6485,6486,2295,4743,6487, # 4992 -6488,6489,3634,6490,6491,6492,6493,6494,6495,6496,2985,4744,6497,6498,4745,6499, # 5008 -6500,2925,3141,4166,6501,6502,4746,6503,6504,4747,6505,6506,6507,2890,6508,6509, # 5024 -6510,6511,6512,6513,6514,6515,6516,6517,6518,6519,3469,4167,6520,6521,6522,4748, # 5040 -4396,3741,4397,4749,4398,3342,2125,4750,6523,4751,4752,4753,3052,6524,2961,4168, # 5056 -6525,4754,6526,4755,4399,2926,4169,6527,3857,6528,4400,4170,6529,4171,6530,6531, # 5072 -2595,6532,6533,6534,6535,3635,6536,6537,6538,6539,6540,6541,6542,4756,6543,6544, # 5088 -6545,6546,6547,6548,4401,6549,6550,6551,6552,4402,3405,4757,4403,6553,6554,6555, # 5104 -4172,3742,6556,6557,6558,3992,3636,6559,6560,3053,2726,6561,3549,4173,3054,4404, # 5120 -6562,6563,3993,4405,3266,3550,2809,4406,6564,6565,6566,4758,4759,6567,3743,6568, # 5136 -4760,3744,4761,3470,6569,6570,6571,4407,6572,3745,4174,6573,4175,2810,4176,3196, # 5152 -4762,6574,4177,6575,6576,2494,2891,3551,6577,6578,3471,6579,4408,6580,3015,3197, # 5168 -6581,3343,2532,3994,3858,6582,3094,3406,4409,6583,2892,4178,4763,4410,3016,4411, # 5184 -6584,3995,3142,3017,2683,6585,4179,6586,6587,4764,4412,6588,6589,4413,6590,2986, # 5200 -6591,2962,3552,6592,2963,3472,6593,6594,4180,4765,6595,6596,2225,3267,4414,6597, # 5216 -3407,3637,4766,6598,6599,3198,6600,4415,6601,3859,3199,6602,3473,4767,2811,4416, # 5232 -1856,3268,3200,2575,3996,3997,3201,4417,6603,3095,2927,6604,3143,6605,2268,6606, # 5248 -3998,3860,3096,2771,6607,6608,3638,2495,4768,6609,3861,6610,3269,2745,4769,4181, # 5264 -3553,6611,2845,3270,6612,6613,6614,3862,6615,6616,4770,4771,6617,3474,3999,4418, # 5280 -4419,6618,3639,3344,6619,4772,4182,6620,2126,6621,6622,6623,4420,4773,6624,3018, # 5296 -6625,4774,3554,6626,4183,2025,3746,6627,4184,2707,6628,4421,4422,3097,1775,4185, # 5312 -3555,6629,6630,2868,6631,6632,4423,6633,6634,4424,2414,2533,2928,6635,4186,2387, # 5328 -6636,4775,6637,4187,6638,1891,4425,3202,3203,6639,6640,4776,6641,3345,6642,6643, # 5344 -3640,6644,3475,3346,3641,4000,6645,3144,6646,3098,2812,4188,3642,3204,6647,3863, # 5360 -3476,6648,3864,6649,4426,4001,6650,6651,6652,2576,6653,4189,4777,6654,6655,6656, # 5376 -2846,6657,3477,3205,4002,6658,4003,6659,3347,2252,6660,6661,6662,4778,6663,6664, # 5392 -6665,6666,6667,6668,6669,4779,4780,2048,6670,3478,3099,6671,3556,3747,4004,6672, # 5408 -6673,6674,3145,4005,3748,6675,6676,6677,6678,6679,3408,6680,6681,6682,6683,3206, # 5424 -3207,6684,6685,4781,4427,6686,4782,4783,4784,6687,6688,6689,4190,6690,6691,3479, # 5440 -6692,2746,6693,4428,6694,6695,6696,6697,6698,6699,4785,6700,6701,3208,2727,6702, # 5456 -3146,6703,6704,3409,2196,6705,4429,6706,6707,6708,2534,1996,6709,6710,6711,2747, # 5472 -6712,6713,6714,4786,3643,6715,4430,4431,6716,3557,6717,4432,4433,6718,6719,6720, # 5488 -6721,3749,6722,4006,4787,6723,6724,3644,4788,4434,6725,6726,4789,2772,6727,6728, # 5504 -6729,6730,6731,2708,3865,2813,4435,6732,6733,4790,4791,3480,6734,6735,6736,6737, # 5520 -4436,3348,6738,3410,4007,6739,6740,4008,6741,6742,4792,3411,4191,6743,6744,6745, # 5536 -6746,6747,3866,6748,3750,6749,6750,6751,6752,6753,6754,6755,3867,6756,4009,6757, # 5552 -4793,4794,6758,2814,2987,6759,6760,6761,4437,6762,6763,6764,6765,3645,6766,6767, # 5568 -3481,4192,6768,3751,6769,6770,2174,6771,3868,3752,6772,6773,6774,4193,4795,4438, # 5584 
-3558,4796,4439,6775,4797,6776,6777,4798,6778,4799,3559,4800,6779,6780,6781,3482, # 5600 -6782,2893,6783,6784,4194,4801,4010,6785,6786,4440,6787,4011,6788,6789,6790,6791, # 5616 -6792,6793,4802,6794,6795,6796,4012,6797,6798,6799,6800,3349,4803,3483,6801,4804, # 5632 -4195,6802,4013,6803,6804,4196,6805,4014,4015,6806,2847,3271,2848,6807,3484,6808, # 5648 -6809,6810,4441,6811,4442,4197,4443,3272,4805,6812,3412,4016,1579,6813,6814,4017, # 5664 -6815,3869,6816,2964,6817,4806,6818,6819,4018,3646,6820,6821,4807,4019,4020,6822, # 5680 -6823,3560,6824,6825,4021,4444,6826,4198,6827,6828,4445,6829,6830,4199,4808,6831, # 5696 -6832,6833,3870,3019,2458,6834,3753,3413,3350,6835,4809,3871,4810,3561,4446,6836, # 5712 -6837,4447,4811,4812,6838,2459,4448,6839,4449,6840,6841,4022,3872,6842,4813,4814, # 5728 -6843,6844,4815,4200,4201,4202,6845,4023,6846,6847,4450,3562,3873,6848,6849,4816, # 5744 -4817,6850,4451,4818,2139,6851,3563,6852,6853,3351,6854,6855,3352,4024,2709,3414, # 5760 -4203,4452,6856,4204,6857,6858,3874,3875,6859,6860,4819,6861,6862,6863,6864,4453, # 5776 -3647,6865,6866,4820,6867,6868,6869,6870,4454,6871,2869,6872,6873,4821,6874,3754, # 5792 -6875,4822,4205,6876,6877,6878,3648,4206,4455,6879,4823,6880,4824,3876,6881,3055, # 5808 -4207,6882,3415,6883,6884,6885,4208,4209,6886,4210,3353,6887,3354,3564,3209,3485, # 5824 -2652,6888,2728,6889,3210,3755,6890,4025,4456,6891,4825,6892,6893,6894,6895,4211, # 5840 -6896,6897,6898,4826,6899,6900,4212,6901,4827,6902,2773,3565,6903,4828,6904,6905, # 5856 -6906,6907,3649,3650,6908,2849,3566,6909,3567,3100,6910,6911,6912,6913,6914,6915, # 5872 -4026,6916,3355,4829,3056,4457,3756,6917,3651,6918,4213,3652,2870,6919,4458,6920, # 5888 -2438,6921,6922,3757,2774,4830,6923,3356,4831,4832,6924,4833,4459,3653,2507,6925, # 5904 -4834,2535,6926,6927,3273,4027,3147,6928,3568,6929,6930,6931,4460,6932,3877,4461, # 5920 -2729,3654,6933,6934,6935,6936,2175,4835,2630,4214,4028,4462,4836,4215,6937,3148, # 5936 -4216,4463,4837,4838,4217,6938,6939,2850,4839,6940,4464,6941,6942,6943,4840,6944, # 5952 -4218,3274,4465,6945,6946,2710,6947,4841,4466,6948,6949,2894,6950,6951,4842,6952, # 5968 -4219,3057,2871,6953,6954,6955,6956,4467,6957,2711,6958,6959,6960,3275,3101,4843, # 5984 -6961,3357,3569,6962,4844,6963,6964,4468,4845,3570,6965,3102,4846,3758,6966,4847, # 6000 -3878,4848,4849,4029,6967,2929,3879,4850,4851,6968,6969,1733,6970,4220,6971,6972, # 6016 -6973,6974,6975,6976,4852,6977,6978,6979,6980,6981,6982,3759,6983,6984,6985,3486, # 6032 -3487,6986,3488,3416,6987,6988,6989,6990,6991,6992,6993,6994,6995,6996,6997,4853, # 6048 -6998,6999,4030,7000,7001,3211,7002,7003,4221,7004,7005,3571,4031,7006,3572,7007, # 6064 -2614,4854,2577,7008,7009,2965,3655,3656,4855,2775,3489,3880,4222,4856,3881,4032, # 6080 -3882,3657,2730,3490,4857,7010,3149,7011,4469,4858,2496,3491,4859,2283,7012,7013, # 6096 -7014,2365,4860,4470,7015,7016,3760,7017,7018,4223,1917,7019,7020,7021,4471,7022, # 6112 -2776,4472,7023,7024,7025,7026,4033,7027,3573,4224,4861,4034,4862,7028,7029,1929, # 6128 -3883,4035,7030,4473,3058,7031,2536,3761,3884,7032,4036,7033,2966,2895,1968,4474, # 6144 -3276,4225,3417,3492,4226,2105,7034,7035,1754,2596,3762,4227,4863,4475,3763,4864, # 6160 -3764,2615,2777,3103,3765,3658,3418,4865,2296,3766,2815,7036,7037,7038,3574,2872, # 6176 -3277,4476,7039,4037,4477,7040,7041,4038,7042,7043,7044,7045,7046,7047,2537,7048, # 6192 -7049,7050,7051,7052,7053,7054,4478,7055,7056,3767,3659,4228,3575,7057,7058,4229, # 6208 -7059,7060,7061,3660,7062,3212,7063,3885,4039,2460,7064,7065,7066,7067,7068,7069, # 
6224 -7070,7071,7072,7073,7074,4866,3768,4867,7075,7076,7077,7078,4868,3358,3278,2653, # 6240 -7079,7080,4479,3886,7081,7082,4869,7083,7084,7085,7086,7087,7088,2538,7089,7090, # 6256 -7091,4040,3150,3769,4870,4041,2896,3359,4230,2930,7092,3279,7093,2967,4480,3213, # 6272 -4481,3661,7094,7095,7096,7097,7098,7099,7100,7101,7102,2461,3770,7103,7104,4231, # 6288 -3151,7105,7106,7107,4042,3662,7108,7109,4871,3663,4872,4043,3059,7110,7111,7112, # 6304 -3493,2988,7113,4873,7114,7115,7116,3771,4874,7117,7118,4232,4875,7119,3576,2336, # 6320 -4876,7120,4233,3419,4044,4877,4878,4482,4483,4879,4484,4234,7121,3772,4880,1045, # 6336 -3280,3664,4881,4882,7122,7123,7124,7125,4883,7126,2778,7127,4485,4486,7128,4884, # 6352 -3214,3887,7129,7130,3215,7131,4885,4045,7132,7133,4046,7134,7135,7136,7137,7138, # 6368 -7139,7140,7141,7142,7143,4235,7144,4886,7145,7146,7147,4887,7148,7149,7150,4487, # 6384 -4047,4488,7151,7152,4888,4048,2989,3888,7153,3665,7154,4049,7155,7156,7157,7158, # 6400 -7159,7160,2931,4889,4890,4489,7161,2631,3889,4236,2779,7162,7163,4891,7164,3060, # 6416 -7165,1672,4892,7166,4893,4237,3281,4894,7167,7168,3666,7169,3494,7170,7171,4050, # 6432 -7172,7173,3104,3360,3420,4490,4051,2684,4052,7174,4053,7175,7176,7177,2253,4054, # 6448 -7178,7179,4895,7180,3152,3890,3153,4491,3216,7181,7182,7183,2968,4238,4492,4055, # 6464 -7184,2990,7185,2479,7186,7187,4493,7188,7189,7190,7191,7192,4896,7193,4897,2969, # 6480 -4494,4898,7194,3495,7195,7196,4899,4495,7197,3105,2731,7198,4900,7199,7200,7201, # 6496 -4056,7202,3361,7203,7204,4496,4901,4902,7205,4497,7206,7207,2315,4903,7208,4904, # 6512 -7209,4905,2851,7210,7211,3577,7212,3578,4906,7213,4057,3667,4907,7214,4058,2354, # 6528 -3891,2376,3217,3773,7215,7216,7217,7218,7219,4498,7220,4908,3282,2685,7221,3496, # 6544 -4909,2632,3154,4910,7222,2337,7223,4911,7224,7225,7226,4912,4913,3283,4239,4499, # 6560 -7227,2816,7228,7229,7230,7231,7232,7233,7234,4914,4500,4501,7235,7236,7237,2686, # 6576 -7238,4915,7239,2897,4502,7240,4503,7241,2516,7242,4504,3362,3218,7243,7244,7245, # 6592 -4916,7246,7247,4505,3363,7248,7249,7250,7251,3774,4506,7252,7253,4917,7254,7255, # 6608 -3284,2991,4918,4919,3219,3892,4920,3106,3497,4921,7256,7257,7258,4922,7259,4923, # 6624 -3364,4507,4508,4059,7260,4240,3498,7261,7262,4924,7263,2992,3893,4060,3220,7264, # 6640 -7265,7266,7267,7268,7269,4509,3775,7270,2817,7271,4061,4925,4510,3776,7272,4241, # 6656 -4511,3285,7273,7274,3499,7275,7276,7277,4062,4512,4926,7278,3107,3894,7279,7280, # 6672 -4927,7281,4513,7282,7283,3668,7284,7285,4242,4514,4243,7286,2058,4515,4928,4929, # 6688 -4516,7287,3286,4244,7288,4517,7289,7290,7291,3669,7292,7293,4930,4931,4932,2355, # 6704 -4933,7294,2633,4518,7295,4245,7296,7297,4519,7298,7299,4520,4521,4934,7300,4246, # 6720 -4522,7301,7302,7303,3579,7304,4247,4935,7305,4936,7306,7307,7308,7309,3777,7310, # 6736 -4523,7311,7312,7313,4248,3580,7314,4524,3778,4249,7315,3581,7316,3287,7317,3221, # 6752 -7318,4937,7319,7320,7321,7322,7323,7324,4938,4939,7325,4525,7326,7327,7328,4063, # 6768 -7329,7330,4940,7331,7332,4941,7333,4526,7334,3500,2780,1741,4942,2026,1742,7335, # 6784 -7336,3582,4527,2388,7337,7338,7339,4528,7340,4250,4943,7341,7342,7343,4944,7344, # 6800 -7345,7346,3020,7347,4945,7348,7349,7350,7351,3895,7352,3896,4064,3897,7353,7354, # 6816 -7355,4251,7356,7357,3898,7358,3779,7359,3780,3288,7360,7361,4529,7362,4946,4530, # 6832 -2027,7363,3899,4531,4947,3222,3583,7364,4948,7365,7366,7367,7368,4949,3501,4950, # 6848 
-3781,4951,4532,7369,2517,4952,4252,4953,3155,7370,4954,4955,4253,2518,4533,7371, # 6864 -7372,2712,4254,7373,7374,7375,3670,4956,3671,7376,2389,3502,4065,7377,2338,7378, # 6880 -7379,7380,7381,3061,7382,4957,7383,7384,7385,7386,4958,4534,7387,7388,2993,7389, # 6896 -3062,7390,4959,7391,7392,7393,4960,3108,4961,7394,4535,7395,4962,3421,4536,7396, # 6912 -4963,7397,4964,1857,7398,4965,7399,7400,2176,3584,4966,7401,7402,3422,4537,3900, # 6928 -3585,7403,3782,7404,2852,7405,7406,7407,4538,3783,2654,3423,4967,4539,7408,3784, # 6944 -3586,2853,4540,4541,7409,3901,7410,3902,7411,7412,3785,3109,2327,3903,7413,7414, # 6960 -2970,4066,2932,7415,7416,7417,3904,3672,3424,7418,4542,4543,4544,7419,4968,7420, # 6976 -7421,4255,7422,7423,7424,7425,7426,4067,7427,3673,3365,4545,7428,3110,2559,3674, # 6992 -7429,7430,3156,7431,7432,3503,7433,3425,4546,7434,3063,2873,7435,3223,4969,4547, # 7008 -4548,2898,4256,4068,7436,4069,3587,3786,2933,3787,4257,4970,4971,3788,7437,4972, # 7024 -3064,7438,4549,7439,7440,7441,7442,7443,4973,3905,7444,2874,7445,7446,7447,7448, # 7040 -3021,7449,4550,3906,3588,4974,7450,7451,3789,3675,7452,2578,7453,4070,7454,7455, # 7056 -7456,4258,3676,7457,4975,7458,4976,4259,3790,3504,2634,4977,3677,4551,4260,7459, # 7072 -7460,7461,7462,3907,4261,4978,7463,7464,7465,7466,4979,4980,7467,7468,2213,4262, # 7088 -7469,7470,7471,3678,4981,7472,2439,7473,4263,3224,3289,7474,3908,2415,4982,7475, # 7104 -4264,7476,4983,2655,7477,7478,2732,4552,2854,2875,7479,7480,4265,7481,4553,4984, # 7120 -7482,7483,4266,7484,3679,3366,3680,2818,2781,2782,3367,3589,4554,3065,7485,4071, # 7136 -2899,7486,7487,3157,2462,4072,4555,4073,4985,4986,3111,4267,2687,3368,4556,4074, # 7152 -3791,4268,7488,3909,2783,7489,2656,1962,3158,4557,4987,1963,3159,3160,7490,3112, # 7168 -4988,4989,3022,4990,4991,3792,2855,7491,7492,2971,4558,7493,7494,4992,7495,7496, # 7184 -7497,7498,4993,7499,3426,4559,4994,7500,3681,4560,4269,4270,3910,7501,4075,4995, # 7200 -4271,7502,7503,4076,7504,4996,7505,3225,4997,4272,4077,2819,3023,7506,7507,2733, # 7216 -4561,7508,4562,7509,3369,3793,7510,3590,2508,7511,7512,4273,3113,2994,2616,7513, # 7232 -7514,7515,7516,7517,7518,2820,3911,4078,2748,7519,7520,4563,4998,7521,7522,7523, # 7248 -7524,4999,4274,7525,4564,3682,2239,4079,4565,7526,7527,7528,7529,5000,7530,7531, # 7264 -5001,4275,3794,7532,7533,7534,3066,5002,4566,3161,7535,7536,4080,7537,3162,7538, # 7280 -7539,4567,7540,7541,7542,7543,7544,7545,5003,7546,4568,7547,7548,7549,7550,7551, # 7296 -7552,7553,7554,7555,7556,5004,7557,7558,7559,5005,7560,3795,7561,4569,7562,7563, # 7312 -7564,2821,3796,4276,4277,4081,7565,2876,7566,5006,7567,7568,2900,7569,3797,3912, # 7328 -7570,7571,7572,4278,7573,7574,7575,5007,7576,7577,5008,7578,7579,4279,2934,7580, # 7344 -7581,5009,7582,4570,7583,4280,7584,7585,7586,4571,4572,3913,7587,4573,3505,7588, # 7360 -5010,7589,7590,7591,7592,3798,4574,7593,7594,5011,7595,4281,7596,7597,7598,4282, # 7376 -5012,7599,7600,5013,3163,7601,5014,7602,3914,7603,7604,2734,4575,4576,4577,7605, # 7392 -7606,7607,7608,7609,3506,5015,4578,7610,4082,7611,2822,2901,2579,3683,3024,4579, # 7408 -3507,7612,4580,7613,3226,3799,5016,7614,7615,7616,7617,7618,7619,7620,2995,3290, # 7424 -7621,4083,7622,5017,7623,7624,7625,7626,7627,4581,3915,7628,3291,7629,5018,7630, # 7440 -7631,7632,7633,4084,7634,7635,3427,3800,7636,7637,4582,7638,5019,4583,5020,7639, # 7456 -3916,7640,3801,5021,4584,4283,7641,7642,3428,3591,2269,7643,2617,7644,4585,3592, # 7472 -7645,4586,2902,7646,7647,3227,5022,7648,4587,7649,4284,7650,7651,7652,4588,2284, # 
7488 -7653,5023,7654,7655,7656,4589,5024,3802,7657,7658,5025,3508,4590,7659,7660,7661, # 7504 -1969,5026,7662,7663,3684,1821,2688,7664,2028,2509,4285,7665,2823,1841,7666,2689, # 7520 -3114,7667,3917,4085,2160,5027,5028,2972,7668,5029,7669,7670,7671,3593,4086,7672, # 7536 -4591,4087,5030,3803,7673,7674,7675,7676,7677,7678,7679,4286,2366,4592,4593,3067, # 7552 -2328,7680,7681,4594,3594,3918,2029,4287,7682,5031,3919,3370,4288,4595,2856,7683, # 7568 -3509,7684,7685,5032,5033,7686,7687,3804,2784,7688,7689,7690,7691,3371,7692,7693, # 7584 -2877,5034,7694,7695,3920,4289,4088,7696,7697,7698,5035,7699,5036,4290,5037,5038, # 7600 -5039,7700,7701,7702,5040,5041,3228,7703,1760,7704,5042,3229,4596,2106,4089,7705, # 7616 -4597,2824,5043,2107,3372,7706,4291,4090,5044,7707,4091,7708,5045,3025,3805,4598, # 7632 -4292,4293,4294,3373,7709,4599,7710,5046,7711,7712,5047,5048,3806,7713,7714,7715, # 7648 -5049,7716,7717,7718,7719,4600,5050,7720,7721,7722,5051,7723,4295,3429,7724,7725, # 7664 -7726,7727,3921,7728,3292,5052,4092,7729,7730,7731,7732,7733,7734,7735,5053,5054, # 7680 -7736,7737,7738,7739,3922,3685,7740,7741,7742,7743,2635,5055,7744,5056,4601,7745, # 7696 -7746,2560,7747,7748,7749,7750,3923,7751,7752,7753,7754,7755,4296,2903,7756,7757, # 7712 -7758,7759,7760,3924,7761,5057,4297,7762,7763,5058,4298,7764,4093,7765,7766,5059, # 7728 -3925,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,3595,7777,4299,5060,4094, # 7744 -7778,3293,5061,7779,7780,4300,7781,7782,4602,7783,3596,7784,7785,3430,2367,7786, # 7760 -3164,5062,5063,4301,7787,7788,4095,5064,5065,7789,3374,3115,7790,7791,7792,7793, # 7776 -7794,7795,7796,3597,4603,7797,7798,3686,3116,3807,5066,7799,7800,5067,7801,7802, # 7792 -4604,4302,5068,4303,4096,7803,7804,3294,7805,7806,5069,4605,2690,7807,3026,7808, # 7808 -7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824, # 7824 -7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 7840 -7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855,7856, # 7856 -7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871,7872, # 7872 -7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887,7888, # 7888 -7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903,7904, # 7904 -7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919,7920, # 7920 -7921,7922,7923,7924,3926,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935, # 7936 -7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951, # 7952 -7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967, # 7968 -7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983, # 7984 -7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999, # 8000 -8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015, # 8016 -8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031, # 8032 -8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047, # 8048 -8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063, # 8064 -8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079, # 8080 -8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095, # 8096 -8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111, # 8112 
-8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127, # 8128 -8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143, # 8144 -8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159, # 8160 -8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175, # 8176 -8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191, # 8192 -8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207, # 8208 -8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223, # 8224 -8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239, # 8240 -8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255, # 8256 -8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271) # 8272 - -# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/jpcntx.py b/awx/lib/site-packages/requests/packages/charade/jpcntx.py deleted file mode 100644 index e4e9e4da51..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/jpcntx.py +++ /dev/null @@ -1,219 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .compat import wrap_ord - -NUM_OF_CATEGORY = 6 -DONT_KNOW = -1 -ENOUGH_REL_THRESHOLD = 100 -MAX_REL_THRESHOLD = 1000 -MINIMUM_DATA_THRESHOLD = 4 - -# This is hiragana 2-char sequence table, the number in each cell represents its frequency category -jp2CharContext = ( -(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1), -(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4), -(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2), -(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), -(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), -(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), -(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4), -(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4), -(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3), -(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3), -(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3), -(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4), -(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3), -(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4), 
-(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3), -(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5), -(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3), -(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5), -(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4), -(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4), -(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3), -(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3), -(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3), -(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5), -(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4), -(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5), -(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3), -(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4), -(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4), -(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4), -(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1), -(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0), -(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3), -(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0), 
-(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3), -(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3), -(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5), -(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4), -(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5), -(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3), -(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3), -(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3), -(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3), -(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4), -(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4), -(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2), -(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3), -(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3), -(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3), -(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3), -(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4), -(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3), -(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4), -(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3), 
-(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3), -(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4), -(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4), -(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3), -(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4), -(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4), -(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3), -(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4), -(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4), -(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4), -(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3), -(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2), -(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2), -(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3), -(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3), -(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5), -(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3), -(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4), -(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4), -(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4), 
-(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
-(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3),
-(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1),
-(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2),
-(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3),
-(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1),
-)
-
-class JapaneseContextAnalysis:
-    def __init__(self):
-        self.reset()
-
-    def reset(self):
-        self._mTotalRel = 0  # total sequences received
-        # category counters; each integer counts sequences in its category
-        self._mRelSample = [0] * NUM_OF_CATEGORY
-        # if the last byte in the current buffer is not the last byte of a
-        # character, we need to know how many bytes to skip in the next buffer
-        self._mNeedToSkipCharNum = 0
-        self._mLastCharOrder = -1  # The order of the previous char
-        # If this flag is set to True, detection is done and a conclusion has
-        # been made
-        self._mDone = False
-
-    def feed(self, aBuf, aLen):
-        if self._mDone:
-            return
-
-        # The buffer we got is byte oriented, and a character may span more
-        # than one buffer. In case the last one or two bytes of the previous
-        # buffer were not complete, we record how many bytes are needed to
-        # complete that character and skip those bytes here. We could record
-        # those bytes as well and analyse the character once it is complete,
-        # but since one character will not make much difference, simply
-        # skipping it simplifies our logic and improves performance.
-        i = self._mNeedToSkipCharNum
-        while i < aLen:
-            order, charLen = self.get_order(aBuf[i:i + 2])
-            i += charLen
-            if i > aLen:
-                self._mNeedToSkipCharNum = i - aLen
-                self._mLastCharOrder = -1
-            else:
-                if (order != -1) and (self._mLastCharOrder != -1):
-                    self._mTotalRel += 1
-                    if self._mTotalRel > MAX_REL_THRESHOLD:
-                        self._mDone = True
-                        break
-                    self._mRelSample[jp2CharContext[self._mLastCharOrder][order]] += 1
-                self._mLastCharOrder = order
-
-    def got_enough_data(self):
-        return self._mTotalRel > ENOUGH_REL_THRESHOLD
-
-    def get_confidence(self):
-        # This is just one way to calculate confidence; it works well in practice.
- if self._mTotalRel > MINIMUM_DATA_THRESHOLD: - return float(self._mTotalRel - self._mRelSample[0]) / self._mTotalRel - else: - return DONT_KNOW - - def get_order(self, aBuf): - return -1, 1 - -class SJISContextAnalysis(JapaneseContextAnalysis): - def get_order(self, aBuf): - if not aBuf: - return -1, 1 - # find out current char's byte length - first_char = wrap_ord(aBuf[0]) - if ((0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC)): - charLen = 2 - else: - charLen = 1 - - # return its order if it is hiragana - if len(aBuf) > 1: - second_char = wrap_ord(aBuf[1]) - if (first_char == 202) and (0x9F <= second_char <= 0xF1): - return second_char - 0x9F, charLen - - return -1, charLen - -class EUCJPContextAnalysis(JapaneseContextAnalysis): - def get_order(self, aBuf): - if not aBuf: - return -1, 1 - # find out current char's byte length - first_char = wrap_ord(aBuf[0]) - if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE): - charLen = 2 - elif first_char == 0x8F: - charLen = 3 - else: - charLen = 1 - - # return its order if it is hiragana - if len(aBuf) > 1: - second_char = wrap_ord(aBuf[1]) - if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3): - return second_char - 0xA1, charLen - - return -1, charLen - -# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/langbulgarianmodel.py b/awx/lib/site-packages/requests/packages/charade/langbulgarianmodel.py deleted file mode 100644 index ea5a60ba04..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/langbulgarianmodel.py +++ /dev/null @@ -1,229 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301 USA
-######################### END LICENSE BLOCK #########################
-
-# 255: Control characters that usually do not appear in any text
-# 254: Carriage/Return
-# 253: symbols (punctuation) that do not belong to a word
-# 252: 0 - 9
-
-# Character Mapping Table:
-# this table is modified based on win1251BulgarianCharToOrderMap, so
-# only numbers <64 are known to be valid
-
-Latin5_BulgarianCharToOrderMap = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
-253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40
-110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50
-253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60
-116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70
-194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80
-210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90
- 81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0
- 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0
- 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0
-  1, 18,  9, 20, 11,  3, 23, 15,  2, 26, 12, 10, 14,  6,  4, 13, # d0
-  7,  8,  5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0
- 62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0
-)
-
-win1251BulgarianCharToOrderMap = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
-253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40
-110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50
-253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60
-116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70
-206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80
-221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90
- 88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0
- 73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0
- 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0
- 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0
-  1, 18,  9, 20, 11,  3, 23, 15,  2, 26, 12, 10, 14,  6,  4, 13, # e0
-  7,  8,  5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0
-)
-
-# Model Table:
-# total sequences: 100%
-# first 512 sequences: 96.9392%
-# first 1024 sequences: 3.0618%
-# rest sequences: 0.2992%
-# negative sequences: 0.0020%
-BulgarianLangModel = (
-0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2,
-3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1,
-0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0,
-0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0, -0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0, -1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0, -0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0, -0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3, -2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1, -3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0, 
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2, -1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0, -3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1, -1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0, -2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2, -2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0, -3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2, -1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0, -2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2, -2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0, -3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2, -1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0, -2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2, -2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0, -2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2, -1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0, -2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2, -1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0, -3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2, -1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0, -3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1, -1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0, -2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1, -1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0, -2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2, -1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0, -2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1, -1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0, -3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, -1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2, -1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1, -2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2, -1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0, -2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2, -1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1, -0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2, -1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, -2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1, -1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0, -1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1, -0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1, -0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, -0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 
-2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0, -1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, -0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, -0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, -1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1, -1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, -1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -) - -Latin5BulgarianModel = { - 'charToOrderMap': Latin5_BulgarianCharToOrderMap, - 'precedenceMatrix': BulgarianLangModel, - 'mTypicalPositiveRatio': 0.969392, - 'keepEnglishLetter': False, - 'charsetName': "ISO-8859-5" -} - -Win1251BulgarianModel = { - 'charToOrderMap': win1251BulgarianCharToOrderMap, - 'precedenceMatrix': BulgarianLangModel, - 'mTypicalPositiveRatio': 0.969392, - 'keepEnglishLetter': False, - 'charsetName': "windows-1251" -} - - -# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/langcyrillicmodel.py b/awx/lib/site-packages/requests/packages/charade/langcyrillicmodel.py deleted file mode 100644 index 15e338fc11..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/langcyrillicmodel.py +++ /dev/null @@ -1,329 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# KOI8-R language model -# Character Mapping Table: -KOI8R_CharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80 -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90 -223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0 -238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0 - 27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0 - 15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0 - 59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0 - 35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0 -) - -win1251_CharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, -223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, -239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253, - 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, - 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, - 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, - 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, -) - -latin5_CharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, -223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, - 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, - 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, - 3, 
21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, - 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, -239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, -) - -macCyrillic_CharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 - 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, - 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, -223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, -239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16, - 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, - 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255, -) - -IBM855_CharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 -191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205, -206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70, - 3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219, -220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229, -230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243, - 8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248, - 43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249, -250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255, -) - -IBM866_CharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 - 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, - 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, - 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, -223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, - 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, -239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, -) - -# 
Model Table: -# total sequences: 100% -# first 512 sequences: 97.6601% -# first 1024 sequences: 2.3389% -# rest sequences: 0.1237% -# negative sequences: 0.0009% -RussianLangModel = ( -0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2, -3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, -0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, -0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0, 
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1, -1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1, -1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0, -2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1, -1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0, -3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1, -1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0, -2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2, -1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1, -1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1, -1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, -2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1, -1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0, -3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2, -1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1, -2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1, -1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0, -2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1, -1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0, -1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1, -1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0, -3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1, -2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1, -3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1, -1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1, -1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1, -0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0, -2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1, -1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0, -1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1, -0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1, -1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, -2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2, -2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1, -1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0, -1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0, -2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0, -1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1, -0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, 
-2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1, -1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1, -1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0, -0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1, -0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1, -0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1, -0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0, -0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, -1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1, -0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1, -2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0, -0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, -) - -Koi8rModel = { - 'charToOrderMap': KOI8R_CharToOrderMap, - 'precedenceMatrix': RussianLangModel, - 'mTypicalPositiveRatio': 0.976601, - 'keepEnglishLetter': False, - 'charsetName': "KOI8-R" -} - -Win1251CyrillicModel = { - 'charToOrderMap': win1251_CharToOrderMap, - 'precedenceMatrix': RussianLangModel, - 'mTypicalPositiveRatio': 0.976601, - 'keepEnglishLetter': False, - 'charsetName': "windows-1251" -} - -Latin5CyrillicModel = { - 'charToOrderMap': latin5_CharToOrderMap, - 'precedenceMatrix': RussianLangModel, - 'mTypicalPositiveRatio': 0.976601, - 'keepEnglishLetter': False, - 'charsetName': "ISO-8859-5" -} - -MacCyrillicModel = { - 'charToOrderMap': macCyrillic_CharToOrderMap, - 'precedenceMatrix': RussianLangModel, - 'mTypicalPositiveRatio': 0.976601, - 'keepEnglishLetter': False, - 'charsetName': "MacCyrillic" -}; - -Ibm866Model = { - 'charToOrderMap': IBM866_CharToOrderMap, - 'precedenceMatrix': RussianLangModel, - 'mTypicalPositiveRatio': 0.976601, - 'keepEnglishLetter': False, - 'charsetName': "IBM866" -} - -Ibm855Model = { - 'charToOrderMap': IBM855_CharToOrderMap, - 'precedenceMatrix': RussianLangModel, - 'mTypicalPositiveRatio': 0.976601, - 'keepEnglishLetter': False, - 'charsetName': "IBM855" -} - -# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/langgreekmodel.py b/awx/lib/site-packages/requests/packages/charade/langgreekmodel.py deleted file mode 100644 index 93241ce26b..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/langgreekmodel.py +++ /dev/null @@ -1,225 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. 
-#
-# Contributor(s):
-# Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301 USA
-######################### END LICENSE BLOCK #########################
-
-# 255: Control characters that usually do not appear in any text
-# 254: Carriage/Return
-# 253: symbols (punctuation) that do not belong to a word
-# 252: 0 - 9
-
-# Character Mapping Table:
-Latin7_CharToOrderMap = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
-253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40
- 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50
-253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60
- 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90
-253,233, 90,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0
-253,253,253,253,247,248, 61, 36, 46, 71, 73,253, 54,253,108,123, # b0
-110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0
- 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0
-124,  1, 29, 20, 21,  3, 32, 13, 25,  5, 11, 16, 10,  6, 30,  4, # e0
-  9,  8, 14,  7,  2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0
-)
-
-win1253_CharToOrderMap = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
-253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40
- 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50
-253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60
- 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90
-253,233, 61,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0
-253,253,253,253,247,253,253, 36, 46, 71, 73,253, 54,253,108,123, # b0
-110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0
- 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0
-124,  1, 29, 20, 21,  3, 32, 13, 25,  5, 11, 16, 10,  6, 30,  4, # e0
-  9,  8, 14,  7,  2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0
-)
-
-# Model Table:
-# total sequences: 100%
-# first 512 sequences: 98.2851%
-# first 1024 sequences: 1.7001%
-# rest sequences: 0.0359%
-# negative sequences: 0.0148%
-GreekLangModel = ( -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,2,2,3,3,3,3,3,3,3,3,1,3,3,3,0,2,2,3,3,0,3,0,3,2,0,3,3,3,0, -3,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,0,3,3,0,3,2,3,3,0,3,2,3,3,3,0,0,3,0,3,0,3,3,2,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, -0,2,3,2,2,3,3,3,3,3,3,3,3,0,3,3,3,3,0,2,3,3,0,3,3,3,3,2,3,3,3,0, -2,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,2,1,3,3,3,3,2,3,3,2,3,3,2,0, -0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,2,3,3,0, -2,0,1,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,2,3,0,0,0,0,3,3,0,3,1,3,3,3,0,3,3,0,3,3,3,3,0,0,0,0, -2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,0,3,0,3,3,3,3,3,0,3,2,2,2,3,0,2,3,3,3,3,3,2,3,3,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,3,2,2,2,3,3,3,3,0,3,1,3,3,3,3,2,3,3,3,3,3,3,3,2,2,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,2,0,3,0,0,0,3,3,2,3,3,3,3,3,0,0,3,2,3,0,2,3,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,3,0,0,3,3,0,2,3,0,3,0,3,3,3,0,0,3,0,3,0,2,2,3,3,0,0, -0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,2,0,3,2,3,3,3,3,0,3,3,3,3,3,0,3,3,2,3,2,3,3,2,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,2,3,2,3,3,3,3,3,3,0,2,3,2,3,2,2,2,3,2,3,3,2,3,0,2,2,2,3,0, -2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,0,0,0,3,3,3,2,3,3,0,0,3,0,3,0,0,0,3,2,0,3,0,3,0,0,2,0,2,0, -0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,0,0,0,3,3,0,3,3,3,0,0,1,2,3,0, -3,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,2,0,0,3,2,2,3,3,0,3,3,3,3,3,2,1,3,0,3,2,3,3,2,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,3,0,2,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,3,0,3,2,3,0,0,3,3,3,0, -3,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,0,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,2,0,3,2,3,0,0,3,2,3,0, -2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,1,2,2,3,3,3,3,3,3,0,2,3,0,3,0,0,0,3,3,0,3,0,2,0,0,2,3,1,0, -2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,3,0,3,0,3,3,2,3,0,3,3,3,3,3,3,0,3,3,3,0,2,3,0,0,3,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,0,0,3,0,0,0,3,3,0,3,0,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,0,0,0,3,3,3,3,3,3,0,0,3,0,2,0,0,0,3,3,0,3,0,3,0,0,2,0,2,0, -0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,3,0,3,0,2,0,3,2,0,3,2,3,2,3,0,0,3,2,3,2,3,3,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,0,0,2,3,3,3,3,3,0,0,0,3,0,2,1,0,0,3,2,2,2,0,3,0,0,2,2,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,2,0,3,0,3,0,3,3,0,2,1,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,3,3,3,0,3,3,3,3,3,3,0,2,3,0,3,0,0,0,2,1,0,2,2,3,0,0,2,2,2,0, 
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,0,0,2,3,3,3,2,3,0,0,1,3,0,2,0,0,0,0,3,0,1,0,2,0,0,1,1,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,1,0,3,0,0,0,3,2,0,3,2,3,3,3,0,0,3,0,3,2,2,2,1,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,0,0,3,0,0,0,0,2,0,2,3,3,2,2,2,2,3,0,2,0,2,2,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,2,0,0,0,0,0,0,2,3,0,2,0,2,3,2,0,0,3,0,3,0,3,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,3,2,3,3,2,2,3,0,2,0,3,0,0,0,2,0,0,0,0,1,2,0,2,0,2,0, -0,2,0,2,0,2,2,0,0,1,0,2,2,2,0,2,2,2,0,2,2,2,0,0,2,0,0,1,0,0,0,0, -0,2,0,3,3,2,0,0,0,0,0,0,1,3,0,2,0,2,2,2,0,0,2,0,3,0,0,2,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,2,3,2,0,2,2,0,2,0,2,2,0,2,0,2,2,2,0,0,0,0,0,0,2,3,0,0,0,2, -0,1,2,0,0,0,0,2,2,0,0,0,2,1,0,2,2,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0, -0,0,2,1,0,2,3,2,2,3,2,3,2,0,0,3,3,3,0,0,3,2,0,0,0,1,1,0,2,0,2,2, -0,2,0,2,0,2,2,0,0,2,0,2,2,2,0,2,2,2,2,0,0,2,0,0,0,2,0,1,0,0,0,0, -0,3,0,3,3,2,2,0,3,0,0,0,2,2,0,2,2,2,1,2,0,0,1,2,2,0,0,3,0,0,0,2, -0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,2,2,0,1,0,0,2,0,0,0,2,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,3,3,2,2,0,0,0,2,0,2,3,3,0,2,0,0,0,0,0,0,2,2,2,0,2,2,0,2,0,2, -0,2,2,0,0,2,2,2,2,1,0,0,2,2,0,2,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0, -0,2,0,3,2,3,0,0,0,3,0,0,2,2,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,0,2, -0,0,2,2,0,0,2,2,2,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,2,0,0,3,2,0,2,2,2,2,2,0,0,0,2,0,0,0,0,2,0,1,0,0,2,0,1,0,0,0, -0,2,2,2,0,2,2,0,1,2,0,2,2,2,0,2,2,2,2,1,2,2,0,0,2,0,0,0,0,0,0,0, -0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,2,0,2,0,2,2,0,0,0,0,1,2,1,0,0,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,3,2,3,0,0,2,0,0,0,2,2,0,2,0,0,0,1,0,0,2,0,2,0,2,2,0,0,0,0, -0,0,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0, -0,2,2,3,2,2,0,0,0,0,0,0,1,3,0,2,0,2,2,0,0,0,1,0,2,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,0,2,0,3,2,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -0,0,2,0,0,0,0,1,1,0,0,2,1,2,0,2,2,0,1,0,0,1,0,0,0,2,0,0,0,0,0,0, -0,3,0,2,2,2,0,0,2,0,0,0,2,0,0,0,2,3,0,2,0,0,0,0,0,0,2,2,0,0,0,2, -0,1,2,0,0,0,1,2,2,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,1,2,0,2,2,0,2,0,0,2,0,0,0,0,1,2,1,0,2,1,0,0,0,0,0,0,0,0,0,0, -0,0,2,0,0,0,3,1,2,2,0,2,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,2,2,2,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,1,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,2, -0,2,2,0,0,2,2,2,2,2,0,1,2,0,0,0,2,2,0,1,0,2,0,0,2,2,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,2, -0,1,2,0,0,0,0,2,2,1,0,1,0,1,0,2,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0, -0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,0,1,0,0,0,0,0,0,2, -0,2,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0, -0,2,2,2,2,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,1, -0,0,2,0,0,0,0,1,2,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0, -0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,2, -0,0,1,0,0,0,0,2,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, -0,3,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,2, -0,0,2,0,0,0,0,2,2,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
-0,2,0,2,2,1,0,0,0,0,0,0,2,0,0,2,0,2,2,2,0,0,0,0,0,0,2,0,0,0,0,2, -0,0,2,0,0,2,0,2,2,0,0,0,0,2,0,2,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0, -0,0,3,0,0,0,2,2,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0, -0,2,2,2,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1, -0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,2,0,0,0,2,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,2,0,0,0, -0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,2,0,2,0,0,0, -0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -) - -Latin7GreekModel = { - 'charToOrderMap': Latin7_CharToOrderMap, - 'precedenceMatrix': GreekLangModel, - 'mTypicalPositiveRatio': 0.982851, - 'keepEnglishLetter': False, - 'charsetName': "ISO-8859-7" -} - -Win1253GreekModel = { - 'charToOrderMap': win1253_CharToOrderMap, - 'precedenceMatrix': GreekLangModel, - 'mTypicalPositiveRatio': 0.982851, - 'keepEnglishLetter': False, - 'charsetName': "windows-1253" -} - -# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/langhebrewmodel.py b/awx/lib/site-packages/requests/packages/charade/langhebrewmodel.py deleted file mode 100644 index d87132446d..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/langhebrewmodel.py +++ /dev/null @@ -1,201 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Simon Montagu -# Portions created by the Initial Developer are Copyright (C) 2005 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# Shoshannah Forbes - original C code (?) -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Windows-1255 language model -# Character Mapping Table: -win1255_CharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 40 - 78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50 -253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 60 - 66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 70 -124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214, -215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221, - 34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227, -106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234, - 30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237, -238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250, - 9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23, - 12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253, -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 98.4004% -# first 1024 sequences: 1.5981% -# rest sequences: 0.087% -# negative sequences: 0.0015% -HebrewLangModel = ( -0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0, -3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2, -1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2, -1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3, -1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2, -1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2, -1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2, -0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2, -0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2, -1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0, -3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2, -0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1, -0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0, -0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2, -0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 
-3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2, -0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0, -3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2, -0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2, -0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2, -0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2, -0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1, -0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2, -0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0, -3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2, -0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2, -0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2, -0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0, -1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2, -0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0, -0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3, -0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0, -0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0, -0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, -0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0, -2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0, -0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2, -0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0, 
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0, -0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1, -1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1, -0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1, -2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1, -1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1, -2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1, -1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1, -2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0, -0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1, -1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1, -0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0, -) - -Win1255HebrewModel = { - 'charToOrderMap': win1255_CharToOrderMap, - 'precedenceMatrix': HebrewLangModel, - 'mTypicalPositiveRatio': 0.984004, - 'keepEnglishLetter': False, - 'charsetName': "windows-1255" -} - -# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/langhungarianmodel.py 
b/awx/lib/site-packages/requests/packages/charade/langhungarianmodel.py deleted file mode 100644 index 6f59c61260..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/langhungarianmodel.py +++ /dev/null @@ -1,225 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Character Mapping Table: -Latin2_HungarianCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, - 46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, -253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, - 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, -159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174, -175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190, -191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205, - 79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, -221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231, -232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241, - 82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85, -245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253, -) - -win1250HungarianCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, - 46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, -253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, - 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, -161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176, -177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190, -191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205, - 
81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, -221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231, -232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241, - 84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87, -245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253, -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 94.7368% -# first 1024 sequences:5.2623% -# rest sequences: 0.8894% -# negative sequences: 0.0009% -HungarianLangModel = ( -0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3, -3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2, -3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0, -3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3, -0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, -3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2, -0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0, -3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, -3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, -3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3, 
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0, -1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0, -1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0, -1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1, -3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1, -2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1, -2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1, -2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1, -2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0, -2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, -3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1, -2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1, -2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1, -2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1, -1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1, -1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1, -3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0, -1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1, -1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1, -2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1, -2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0, -2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1, -3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1, -2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1, -1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0, -1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0, -2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1, -2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1, -1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0, -1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1, -2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0, -1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0, -1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0, -2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1, 
-2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1, -2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, -1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1, -1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1, -1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0, -0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0, -2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1, -2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1, -1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1, -2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1, -1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0, -1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0, -2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0, -2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1, -2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0, -1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0, -2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0, -0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, -1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0, -0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0, -1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, -0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, -2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0, -0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0, -) - -Latin2HungarianModel = { - 'charToOrderMap': Latin2_HungarianCharToOrderMap, - 'precedenceMatrix': HungarianLangModel, - 'mTypicalPositiveRatio': 0.947368, - 'keepEnglishLetter': True, - 'charsetName': "ISO-8859-2" -} - -Win1250HungarianModel = { - 'charToOrderMap': win1250HungarianCharToOrderMap, - 'precedenceMatrix': HungarianLangModel, - 'mTypicalPositiveRatio': 0.947368, - 'keepEnglishLetter': True, - 'charsetName': "windows-1250" -} - -# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/langthaimodel.py b/awx/lib/site-packages/requests/packages/charade/langthaimodel.py deleted file mode 100644 index df343a7473..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/langthaimodel.py +++ /dev/null @@ -1,200 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
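One field worth noting before the Thai model: the two Hungarian model dicts above set 'keepEnglishLetter': True, while the Greek, Hebrew, and Thai models set it to False. The flag plausibly tells the single-byte prober how to pre-filter its input, since Hungarian is written in Latin script and plain ASCII letters carry signal for it, whereas for non-Latin scripts they are noise. A hedged sketch of that decision; filter_with_english_letters is visible in latin1prober.py later in this diff, and filter_without_english_letters is assumed here by analogy:

    def prefilter(prober, model, buf):
        # Latin-script models (Hungarian) keep ASCII a-z/A-Z...
        if model['keepEnglishLetter']:
            return prober.filter_with_english_letters(buf)
        # ...non-Latin models (Greek, Hebrew, Thai) strip them.
        return prober.filter_without_english_letters(buf)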
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# The following result for thai was collected from a limited sample (1M). - -# Character Mapping Table: -TIS620CharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40 -188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50 -253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60 - 96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70 -209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222, -223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235, -236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57, - 49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54, - 45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63, - 22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244, - 11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247, - 68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253, -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 92.6386% -# first 1024 sequences:7.3177% -# rest sequences: 1.0230% -# negative sequences: 0.0436% -ThaiLangModel = ( -0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3, -0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2, -3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3, -0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1, -3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2, -3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1, -3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2, -3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1, -3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1, -3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0, -3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1, -2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1, -3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1, -0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1, -0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0, -3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2, -1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0, -3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3, -3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0, -1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2, 
-0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0, -2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3, -0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0, -3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1, -2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0, -3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2, -0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2, -3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, -3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0, -2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2, -3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1, -2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1, -3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0, -3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1, -3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1, -3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1, -1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2, -0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3, -0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1, -3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0, -3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1, -1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0, -3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1, -3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2, -0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0, -0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0, -1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1, -1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1, -3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1, -0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0, -0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0, -3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0, -0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1, -0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
-3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0, -0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1, -0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1, -0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0, -0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1, -0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0, -3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0, -0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0, -0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0, -3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1, -2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1, -0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0, -3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0, -0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0, -1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0, -1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0, -1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -) - -TIS620ThaiModel = { - 'charToOrderMap': TIS620CharToOrderMap, - 'precedenceMatrix': ThaiLangModel, - 'mTypicalPositiveRatio': 0.926386, - 'keepEnglishLetter': False, - 'charsetName': "TIS-620" -} - -# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/latin1prober.py b/awx/lib/site-packages/requests/packages/charade/latin1prober.py deleted file mode 100644 index 18eefd46a5..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/latin1prober.py +++ /dev/null @@ -1,139 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .constants import eNotMe -from .compat import wrap_ord - -FREQ_CAT_NUM = 4 - -UDF = 0 # undefined -OTH = 1 # other -ASC = 2 # ascii capital letter -ASS = 3 # ascii small letter -ACV = 4 # accent capital vowel -ACO = 5 # accent capital other -ASV = 6 # accent small vowel -ASO = 7 # accent small other -CLASS_NUM = 8 # total classes - -Latin1_CharToClass = ( - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F - OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47 - ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F - ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57 - ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F - OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67 - ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F - ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77 - ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F - OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87 - OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F - UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97 - OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF - ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7 - ACV, ACV, ACV, 
ACV, ACV, ACV, ACV, ACV, # C8 - CF - ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7 - ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF - ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7 - ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF - ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7 - ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF -) - -# 0 : illegal -# 1 : very unlikely -# 2 : normal -# 3 : very likely -Latin1ClassModel = ( - # UDF OTH ASC ASS ACV ACO ASV ASO - 0, 0, 0, 0, 0, 0, 0, 0, # UDF - 0, 3, 3, 3, 3, 3, 3, 3, # OTH - 0, 3, 3, 3, 3, 3, 3, 3, # ASC - 0, 3, 3, 3, 1, 1, 3, 3, # ASS - 0, 3, 3, 3, 1, 2, 1, 2, # ACV - 0, 3, 3, 3, 3, 3, 3, 3, # ACO - 0, 3, 1, 3, 1, 1, 1, 3, # ASV - 0, 3, 1, 3, 1, 1, 3, 3, # ASO -) - - -class Latin1Prober(CharSetProber): - def __init__(self): - CharSetProber.__init__(self) - self.reset() - - def reset(self): - self._mLastCharClass = OTH - self._mFreqCounter = [0] * FREQ_CAT_NUM - CharSetProber.reset(self) - - def get_charset_name(self): - return "windows-1252" - - def feed(self, aBuf): - aBuf = self.filter_with_english_letters(aBuf) - for c in aBuf: - charClass = Latin1_CharToClass[wrap_ord(c)] - freq = Latin1ClassModel[(self._mLastCharClass * CLASS_NUM) - + charClass] - if freq == 0: - self._mState = eNotMe - break - self._mFreqCounter[freq] += 1 - self._mLastCharClass = charClass - - return self.get_state() - - def get_confidence(self): - if self.get_state() == eNotMe: - return 0.01 - - total = sum(self._mFreqCounter) - if total < 0.01: - confidence = 0.0 - else: - confidence = ((float(self._mFreqCounter[3]) / total) - - (self._mFreqCounter[1] * 20.0 / total)) - if confidence < 0.0: - confidence = 0.0 - # lower the confidence of latin1 so that other more accurate - # detector can take priority. - confidence = confidence * 0.5 - return confidence diff --git a/awx/lib/site-packages/requests/packages/charade/mbcharsetprober.py b/awx/lib/site-packages/requests/packages/charade/mbcharsetprober.py deleted file mode 100644 index 1eee253c04..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/mbcharsetprober.py +++ /dev/null @@ -1,86 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# Proofpoint, Inc. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -import sys -from . 
import constants -from .charsetprober import CharSetProber - - -class MultiByteCharSetProber(CharSetProber): - def __init__(self): - CharSetProber.__init__(self) - self._mDistributionAnalyzer = None - self._mCodingSM = None - self._mLastChar = [0, 0] - - def reset(self): - CharSetProber.reset(self) - if self._mCodingSM: - self._mCodingSM.reset() - if self._mDistributionAnalyzer: - self._mDistributionAnalyzer.reset() - self._mLastChar = [0, 0] - - def get_charset_name(self): - pass - - def feed(self, aBuf): - aLen = len(aBuf) - for i in range(0, aLen): - codingState = self._mCodingSM.next_state(aBuf[i]) - if codingState == constants.eError: - if constants._debug: - sys.stderr.write(self.get_charset_name() - + ' prober hit error at byte ' + str(i) - + '\n') - self._mState = constants.eNotMe - break - elif codingState == constants.eItsMe: - self._mState = constants.eFoundIt - break - elif codingState == constants.eStart: - charLen = self._mCodingSM.get_current_charlen() - if i == 0: - self._mLastChar[1] = aBuf[0] - self._mDistributionAnalyzer.feed(self._mLastChar, charLen) - else: - self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1], - charLen) - - self._mLastChar[0] = aBuf[aLen - 1] - - if self.get_state() == constants.eDetecting: - if (self._mDistributionAnalyzer.got_enough_data() and - (self.get_confidence() > constants.SHORTCUT_THRESHOLD)): - self._mState = constants.eFoundIt - - return self.get_state() - - def get_confidence(self): - return self._mDistributionAnalyzer.get_confidence() diff --git a/awx/lib/site-packages/requests/packages/charade/mbcsgroupprober.py b/awx/lib/site-packages/requests/packages/charade/mbcsgroupprober.py deleted file mode 100644 index 2f6f5e897f..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/mbcsgroupprober.py +++ /dev/null @@ -1,54 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# Proofpoint, Inc. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
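MultiByteCharSetProber, removed just above, is the shared engine behind every multi-byte prober in this package: feed() steps a coding state machine over each byte (eError rules the charset out, eItsMe confirms it outright), and each time the machine returns to eStart, meaning a character just completed, it hands the last one or two bytes to a distribution analyzer for frequency scoring. get_confidence() simply defers to that analyzer, and a charset is also accepted early once the analyzer has enough data and the confidence clears SHORTCUT_THRESHOLD. Concrete subclasses supply the two collaborators; big5prober.py elsewhere in this diff is wired roughly as follows (reconstructed from memory, so treat it as a sketch rather than a verbatim quote):

    from .mbcharsetprober import MultiByteCharSetProber
    from .codingstatemachine import CodingStateMachine
    from .chardistribution import Big5DistributionAnalysis
    from .mbcssm import Big5SMModel


    class Big5Prober(MultiByteCharSetProber):
        def __init__(self):
            MultiByteCharSetProber.__init__(self)
            # The state machine validates Big5 byte sequences; the
            # distribution analyzer scores character frequencies.
            self._mCodingSM = CodingStateMachine(Big5SMModel)
            self._mDistributionAnalyzer = Big5DistributionAnalysis()
            self.reset()

        def get_charset_name(self):
            return "Big5"

The same pattern, with a different SM model and analyzer per encoding, covers the UTF-8, SJIS, EUC-JP, GB2312, EUC-KR, CP949, and EUC-TW probers that MBCSGroupProber instantiates below.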
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetgroupprober import CharSetGroupProber -from .utf8prober import UTF8Prober -from .sjisprober import SJISProber -from .eucjpprober import EUCJPProber -from .gb2312prober import GB2312Prober -from .euckrprober import EUCKRProber -from .cp949prober import CP949Prober -from .big5prober import Big5Prober -from .euctwprober import EUCTWProber - - -class MBCSGroupProber(CharSetGroupProber): - def __init__(self): - CharSetGroupProber.__init__(self) - self._mProbers = [ - UTF8Prober(), - SJISProber(), - EUCJPProber(), - GB2312Prober(), - EUCKRProber(), - CP949Prober(), - Big5Prober(), - EUCTWProber() - ] - self.reset() diff --git a/awx/lib/site-packages/requests/packages/charade/mbcssm.py b/awx/lib/site-packages/requests/packages/charade/mbcssm.py deleted file mode 100644 index 55c02f0a06..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/mbcssm.py +++ /dev/null @@ -1,575 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
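The file whose removal begins above, mbcssm.py, holds the per-encoding state-machine models that those probers run. Each model is a dict with a 256-entry classTable (byte to byte class), a flattened stateTable (one row of classFactor entries per state, each entry being eStart, eError, eItsMe, or an intermediate state), and a charLenTable giving the expected character length for the class that opens a character. The CodingStateMachine that walks these tables lives in codingstatemachine.py, outside this hunk; its stepping logic is presumably along these lines (a sketch, not the shipped class):

    from .constants import eStart

    class CodingStateMachineSketch(object):
        def __init__(self, sm):
            self._model = sm
            self._curr_state = eStart
            self._curr_char_len = 0

        def next_state(self, byte):
            # Classify the byte, then index the flattened state table:
            # row = current state, column = byte class.
            byte_class = self._model['classTable'][byte]
            if self._curr_state == eStart:
                # A new character starts: remember its expected length.
                self._curr_char_len = self._model['charLenTable'][byte_class]
            idx = self._curr_state * self._model['classFactor'] + byte_class
            self._curr_state = self._model['stateTable'][idx]
            return self._curr_state

Read against Big5SMModel below, for instance: a lead byte of class 3 (0xa1-0xff) moves row 0 (eStart) to state 3 with an expected length of 2, and a valid trail byte then returns the machine to eStart.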
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .constants import eStart, eError, eItsMe - -# BIG5 - -BIG5_cls = ( - 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 1,1,1,1,1,1,1,1, # 30 - 37 - 1,1,1,1,1,1,1,1, # 38 - 3f - 2,2,2,2,2,2,2,2, # 40 - 47 - 2,2,2,2,2,2,2,2, # 48 - 4f - 2,2,2,2,2,2,2,2, # 50 - 57 - 2,2,2,2,2,2,2,2, # 58 - 5f - 2,2,2,2,2,2,2,2, # 60 - 67 - 2,2,2,2,2,2,2,2, # 68 - 6f - 2,2,2,2,2,2,2,2, # 70 - 77 - 2,2,2,2,2,2,2,1, # 78 - 7f - 4,4,4,4,4,4,4,4, # 80 - 87 - 4,4,4,4,4,4,4,4, # 88 - 8f - 4,4,4,4,4,4,4,4, # 90 - 97 - 4,4,4,4,4,4,4,4, # 98 - 9f - 4,3,3,3,3,3,3,3, # a0 - a7 - 3,3,3,3,3,3,3,3, # a8 - af - 3,3,3,3,3,3,3,3, # b0 - b7 - 3,3,3,3,3,3,3,3, # b8 - bf - 3,3,3,3,3,3,3,3, # c0 - c7 - 3,3,3,3,3,3,3,3, # c8 - cf - 3,3,3,3,3,3,3,3, # d0 - d7 - 3,3,3,3,3,3,3,3, # d8 - df - 3,3,3,3,3,3,3,3, # e0 - e7 - 3,3,3,3,3,3,3,3, # e8 - ef - 3,3,3,3,3,3,3,3, # f0 - f7 - 3,3,3,3,3,3,3,0 # f8 - ff -) - -BIG5_st = ( - eError,eStart,eStart, 3,eError,eError,eError,eError,#00-07 - eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,#08-0f - eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart#10-17 -) - -Big5CharLenTable = (0, 1, 1, 2, 0) - -Big5SMModel = {'classTable': BIG5_cls, - 'classFactor': 5, - 'stateTable': BIG5_st, - 'charLenTable': Big5CharLenTable, - 'name': 'Big5'} - -# CP949 - -CP949_cls = ( - 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f - 1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f - 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f - 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f - 1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f - 4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f - 1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f - 5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f - 0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f - 6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f - 6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af - 7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf - 7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf - 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df - 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef - 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff -) - -CP949_st = ( -#cls= 0 1 2 3 4 5 6 7 8 9 # previous state = - eError,eStart, 3,eError,eStart,eStart, 4, 5,eError, 6, # eStart - eError,eError,eError,eError,eError,eError,eError,eError,eError,eError, # eError - eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe, # eItsMe - eError,eError,eStart,eStart,eError,eError,eError,eStart,eStart,eStart, # 3 - eError,eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart, # 4 - eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart, # 5 - eError,eStart,eStart,eStart,eStart,eError,eError,eStart,eStart,eStart, # 6 -) - -CP949CharLenTable = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2) - -CP949SMModel = {'classTable': CP949_cls, - 'classFactor': 10, - 'stateTable': CP949_st, - 'charLenTable': CP949CharLenTable, - 'name': 'CP949'} - -# EUC-JP - -EUCJP_cls = ( - 4,4,4,4,4,4,4,4, # 00 - 07 - 4,4,4,4,4,4,5,5, # 08 - 0f - 4,4,4,4,4,4,4,4, # 10 - 17 - 4,4,4,5,4,4,4,4, # 18 - 1f - 4,4,4,4,4,4,4,4, # 20 - 27 - 4,4,4,4,4,4,4,4, # 28 - 2f - 4,4,4,4,4,4,4,4, # 30 - 37 - 4,4,4,4,4,4,4,4, # 38 - 3f - 
4,4,4,4,4,4,4,4, # 40 - 47 - 4,4,4,4,4,4,4,4, # 48 - 4f - 4,4,4,4,4,4,4,4, # 50 - 57 - 4,4,4,4,4,4,4,4, # 58 - 5f - 4,4,4,4,4,4,4,4, # 60 - 67 - 4,4,4,4,4,4,4,4, # 68 - 6f - 4,4,4,4,4,4,4,4, # 70 - 77 - 4,4,4,4,4,4,4,4, # 78 - 7f - 5,5,5,5,5,5,5,5, # 80 - 87 - 5,5,5,5,5,5,1,3, # 88 - 8f - 5,5,5,5,5,5,5,5, # 90 - 97 - 5,5,5,5,5,5,5,5, # 98 - 9f - 5,2,2,2,2,2,2,2, # a0 - a7 - 2,2,2,2,2,2,2,2, # a8 - af - 2,2,2,2,2,2,2,2, # b0 - b7 - 2,2,2,2,2,2,2,2, # b8 - bf - 2,2,2,2,2,2,2,2, # c0 - c7 - 2,2,2,2,2,2,2,2, # c8 - cf - 2,2,2,2,2,2,2,2, # d0 - d7 - 2,2,2,2,2,2,2,2, # d8 - df - 0,0,0,0,0,0,0,0, # e0 - e7 - 0,0,0,0,0,0,0,0, # e8 - ef - 0,0,0,0,0,0,0,0, # f0 - f7 - 0,0,0,0,0,0,0,5 # f8 - ff -) - -EUCJP_st = ( - 3, 4, 3, 5,eStart,eError,eError,eError,#00-07 - eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f - eItsMe,eItsMe,eStart,eError,eStart,eError,eError,eError,#10-17 - eError,eError,eStart,eError,eError,eError, 3,eError,#18-1f - 3,eError,eError,eError,eStart,eStart,eStart,eStart#20-27 -) - -EUCJPCharLenTable = (2, 2, 2, 3, 1, 0) - -EUCJPSMModel = {'classTable': EUCJP_cls, - 'classFactor': 6, - 'stateTable': EUCJP_st, - 'charLenTable': EUCJPCharLenTable, - 'name': 'EUC-JP'} - -# EUC-KR - -EUCKR_cls = ( - 1,1,1,1,1,1,1,1, # 00 - 07 - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 1,1,1,1,1,1,1,1, # 30 - 37 - 1,1,1,1,1,1,1,1, # 38 - 3f - 1,1,1,1,1,1,1,1, # 40 - 47 - 1,1,1,1,1,1,1,1, # 48 - 4f - 1,1,1,1,1,1,1,1, # 50 - 57 - 1,1,1,1,1,1,1,1, # 58 - 5f - 1,1,1,1,1,1,1,1, # 60 - 67 - 1,1,1,1,1,1,1,1, # 68 - 6f - 1,1,1,1,1,1,1,1, # 70 - 77 - 1,1,1,1,1,1,1,1, # 78 - 7f - 0,0,0,0,0,0,0,0, # 80 - 87 - 0,0,0,0,0,0,0,0, # 88 - 8f - 0,0,0,0,0,0,0,0, # 90 - 97 - 0,0,0,0,0,0,0,0, # 98 - 9f - 0,2,2,2,2,2,2,2, # a0 - a7 - 2,2,2,2,2,3,3,3, # a8 - af - 2,2,2,2,2,2,2,2, # b0 - b7 - 2,2,2,2,2,2,2,2, # b8 - bf - 2,2,2,2,2,2,2,2, # c0 - c7 - 2,3,2,2,2,2,2,2, # c8 - cf - 2,2,2,2,2,2,2,2, # d0 - d7 - 2,2,2,2,2,2,2,2, # d8 - df - 2,2,2,2,2,2,2,2, # e0 - e7 - 2,2,2,2,2,2,2,2, # e8 - ef - 2,2,2,2,2,2,2,2, # f0 - f7 - 2,2,2,2,2,2,2,0 # f8 - ff -) - -EUCKR_st = ( - eError,eStart, 3,eError,eError,eError,eError,eError,#00-07 - eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,eStart,eStart #08-0f -) - -EUCKRCharLenTable = (0, 1, 2, 0) - -EUCKRSMModel = {'classTable': EUCKR_cls, - 'classFactor': 4, - 'stateTable': EUCKR_st, - 'charLenTable': EUCKRCharLenTable, - 'name': 'EUC-KR'} - -# EUC-TW - -EUCTW_cls = ( - 2,2,2,2,2,2,2,2, # 00 - 07 - 2,2,2,2,2,2,0,0, # 08 - 0f - 2,2,2,2,2,2,2,2, # 10 - 17 - 2,2,2,0,2,2,2,2, # 18 - 1f - 2,2,2,2,2,2,2,2, # 20 - 27 - 2,2,2,2,2,2,2,2, # 28 - 2f - 2,2,2,2,2,2,2,2, # 30 - 37 - 2,2,2,2,2,2,2,2, # 38 - 3f - 2,2,2,2,2,2,2,2, # 40 - 47 - 2,2,2,2,2,2,2,2, # 48 - 4f - 2,2,2,2,2,2,2,2, # 50 - 57 - 2,2,2,2,2,2,2,2, # 58 - 5f - 2,2,2,2,2,2,2,2, # 60 - 67 - 2,2,2,2,2,2,2,2, # 68 - 6f - 2,2,2,2,2,2,2,2, # 70 - 77 - 2,2,2,2,2,2,2,2, # 78 - 7f - 0,0,0,0,0,0,0,0, # 80 - 87 - 0,0,0,0,0,0,6,0, # 88 - 8f - 0,0,0,0,0,0,0,0, # 90 - 97 - 0,0,0,0,0,0,0,0, # 98 - 9f - 0,3,4,4,4,4,4,4, # a0 - a7 - 5,5,1,1,1,1,1,1, # a8 - af - 1,1,1,1,1,1,1,1, # b0 - b7 - 1,1,1,1,1,1,1,1, # b8 - bf - 1,1,3,1,3,3,3,3, # c0 - c7 - 3,3,3,3,3,3,3,3, # c8 - cf - 3,3,3,3,3,3,3,3, # d0 - d7 - 3,3,3,3,3,3,3,3, # d8 - df - 3,3,3,3,3,3,3,3, # e0 - e7 - 3,3,3,3,3,3,3,3, # e8 - ef - 3,3,3,3,3,3,3,3, # f0 - f7 - 3,3,3,3,3,3,3,0 # f8 - ff -) - -EUCTW_st = ( - eError,eError,eStart, 3, 3, 3, 4,eError,#00-07 - 
eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,#08-0f - eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eStart,eError,#10-17 - eStart,eStart,eStart,eError,eError,eError,eError,eError,#18-1f - 5,eError,eError,eError,eStart,eError,eStart,eStart,#20-27 - eStart,eError,eStart,eStart,eStart,eStart,eStart,eStart #28-2f -) - -EUCTWCharLenTable = (0, 0, 1, 2, 2, 2, 3) - -EUCTWSMModel = {'classTable': EUCTW_cls, - 'classFactor': 7, - 'stateTable': EUCTW_st, - 'charLenTable': EUCTWCharLenTable, - 'name': 'x-euc-tw'} - -# GB2312 - -GB2312_cls = ( - 1,1,1,1,1,1,1,1, # 00 - 07 - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 3,3,3,3,3,3,3,3, # 30 - 37 - 3,3,1,1,1,1,1,1, # 38 - 3f - 2,2,2,2,2,2,2,2, # 40 - 47 - 2,2,2,2,2,2,2,2, # 48 - 4f - 2,2,2,2,2,2,2,2, # 50 - 57 - 2,2,2,2,2,2,2,2, # 58 - 5f - 2,2,2,2,2,2,2,2, # 60 - 67 - 2,2,2,2,2,2,2,2, # 68 - 6f - 2,2,2,2,2,2,2,2, # 70 - 77 - 2,2,2,2,2,2,2,4, # 78 - 7f - 5,6,6,6,6,6,6,6, # 80 - 87 - 6,6,6,6,6,6,6,6, # 88 - 8f - 6,6,6,6,6,6,6,6, # 90 - 97 - 6,6,6,6,6,6,6,6, # 98 - 9f - 6,6,6,6,6,6,6,6, # a0 - a7 - 6,6,6,6,6,6,6,6, # a8 - af - 6,6,6,6,6,6,6,6, # b0 - b7 - 6,6,6,6,6,6,6,6, # b8 - bf - 6,6,6,6,6,6,6,6, # c0 - c7 - 6,6,6,6,6,6,6,6, # c8 - cf - 6,6,6,6,6,6,6,6, # d0 - d7 - 6,6,6,6,6,6,6,6, # d8 - df - 6,6,6,6,6,6,6,6, # e0 - e7 - 6,6,6,6,6,6,6,6, # e8 - ef - 6,6,6,6,6,6,6,6, # f0 - f7 - 6,6,6,6,6,6,6,0 # f8 - ff -) - -GB2312_st = ( - eError,eStart,eStart,eStart,eStart,eStart, 3,eError,#00-07 - eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,#08-0f - eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,eStart,#10-17 - 4,eError,eStart,eStart,eError,eError,eError,eError,#18-1f - eError,eError, 5,eError,eError,eError,eItsMe,eError,#20-27 - eError,eError,eStart,eStart,eStart,eStart,eStart,eStart #28-2f -) - -# To be accurate, the length of class 6 can be either 2 or 4. -# But it is not necessary to discriminate between the two since -# it is used for frequency analysis only, and we are validing -# each code range there as well. So it is safe to set it to be -# 2 here. -GB2312CharLenTable = (0, 1, 1, 1, 1, 1, 2) - -GB2312SMModel = {'classTable': GB2312_cls, - 'classFactor': 7, - 'stateTable': GB2312_st, - 'charLenTable': GB2312CharLenTable, - 'name': 'GB2312'} - -# Shift_JIS - -SJIS_cls = ( - 1,1,1,1,1,1,1,1, # 00 - 07 - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 1,1,1,1,1,1,1,1, # 30 - 37 - 1,1,1,1,1,1,1,1, # 38 - 3f - 2,2,2,2,2,2,2,2, # 40 - 47 - 2,2,2,2,2,2,2,2, # 48 - 4f - 2,2,2,2,2,2,2,2, # 50 - 57 - 2,2,2,2,2,2,2,2, # 58 - 5f - 2,2,2,2,2,2,2,2, # 60 - 67 - 2,2,2,2,2,2,2,2, # 68 - 6f - 2,2,2,2,2,2,2,2, # 70 - 77 - 2,2,2,2,2,2,2,1, # 78 - 7f - 3,3,3,3,3,3,3,3, # 80 - 87 - 3,3,3,3,3,3,3,3, # 88 - 8f - 3,3,3,3,3,3,3,3, # 90 - 97 - 3,3,3,3,3,3,3,3, # 98 - 9f - #0xa0 is illegal in sjis encoding, but some pages does - #contain such byte. We need to be more error forgiven. 
- 2,2,2,2,2,2,2,2, # a0 - a7 - 2,2,2,2,2,2,2,2, # a8 - af - 2,2,2,2,2,2,2,2, # b0 - b7 - 2,2,2,2,2,2,2,2, # b8 - bf - 2,2,2,2,2,2,2,2, # c0 - c7 - 2,2,2,2,2,2,2,2, # c8 - cf - 2,2,2,2,2,2,2,2, # d0 - d7 - 2,2,2,2,2,2,2,2, # d8 - df - 3,3,3,3,3,3,3,3, # e0 - e7 - 3,3,3,3,3,4,4,4, # e8 - ef - 4,4,4,4,4,4,4,4, # f0 - f7 - 4,4,4,4,4,0,0,0 # f8 - ff -) - - -SJIS_st = ( - eError,eStart,eStart, 3,eError,eError,eError,eError,#00-07 - eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f - eItsMe,eItsMe,eError,eError,eStart,eStart,eStart,eStart #10-17 -) - -SJISCharLenTable = (0, 1, 1, 2, 0, 0) - -SJISSMModel = {'classTable': SJIS_cls, - 'classFactor': 6, - 'stateTable': SJIS_st, - 'charLenTable': SJISCharLenTable, - 'name': 'Shift_JIS'} - -# UCS2-BE - -UCS2BE_cls = ( - 0,0,0,0,0,0,0,0, # 00 - 07 - 0,0,1,0,0,2,0,0, # 08 - 0f - 0,0,0,0,0,0,0,0, # 10 - 17 - 0,0,0,3,0,0,0,0, # 18 - 1f - 0,0,0,0,0,0,0,0, # 20 - 27 - 0,3,3,3,3,3,0,0, # 28 - 2f - 0,0,0,0,0,0,0,0, # 30 - 37 - 0,0,0,0,0,0,0,0, # 38 - 3f - 0,0,0,0,0,0,0,0, # 40 - 47 - 0,0,0,0,0,0,0,0, # 48 - 4f - 0,0,0,0,0,0,0,0, # 50 - 57 - 0,0,0,0,0,0,0,0, # 58 - 5f - 0,0,0,0,0,0,0,0, # 60 - 67 - 0,0,0,0,0,0,0,0, # 68 - 6f - 0,0,0,0,0,0,0,0, # 70 - 77 - 0,0,0,0,0,0,0,0, # 78 - 7f - 0,0,0,0,0,0,0,0, # 80 - 87 - 0,0,0,0,0,0,0,0, # 88 - 8f - 0,0,0,0,0,0,0,0, # 90 - 97 - 0,0,0,0,0,0,0,0, # 98 - 9f - 0,0,0,0,0,0,0,0, # a0 - a7 - 0,0,0,0,0,0,0,0, # a8 - af - 0,0,0,0,0,0,0,0, # b0 - b7 - 0,0,0,0,0,0,0,0, # b8 - bf - 0,0,0,0,0,0,0,0, # c0 - c7 - 0,0,0,0,0,0,0,0, # c8 - cf - 0,0,0,0,0,0,0,0, # d0 - d7 - 0,0,0,0,0,0,0,0, # d8 - df - 0,0,0,0,0,0,0,0, # e0 - e7 - 0,0,0,0,0,0,0,0, # e8 - ef - 0,0,0,0,0,0,0,0, # f0 - f7 - 0,0,0,0,0,0,4,5 # f8 - ff -) - -UCS2BE_st = ( - 5, 7, 7,eError, 4, 3,eError,eError,#00-07 - eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f - eItsMe,eItsMe, 6, 6, 6, 6,eError,eError,#10-17 - 6, 6, 6, 6, 6,eItsMe, 6, 6,#18-1f - 6, 6, 6, 6, 5, 7, 7,eError,#20-27 - 5, 8, 6, 6,eError, 6, 6, 6,#28-2f - 6, 6, 6, 6,eError,eError,eStart,eStart #30-37 -) - -UCS2BECharLenTable = (2, 2, 2, 0, 2, 2) - -UCS2BESMModel = {'classTable': UCS2BE_cls, - 'classFactor': 6, - 'stateTable': UCS2BE_st, - 'charLenTable': UCS2BECharLenTable, - 'name': 'UTF-16BE'} - -# UCS2-LE - -UCS2LE_cls = ( - 0,0,0,0,0,0,0,0, # 00 - 07 - 0,0,1,0,0,2,0,0, # 08 - 0f - 0,0,0,0,0,0,0,0, # 10 - 17 - 0,0,0,3,0,0,0,0, # 18 - 1f - 0,0,0,0,0,0,0,0, # 20 - 27 - 0,3,3,3,3,3,0,0, # 28 - 2f - 0,0,0,0,0,0,0,0, # 30 - 37 - 0,0,0,0,0,0,0,0, # 38 - 3f - 0,0,0,0,0,0,0,0, # 40 - 47 - 0,0,0,0,0,0,0,0, # 48 - 4f - 0,0,0,0,0,0,0,0, # 50 - 57 - 0,0,0,0,0,0,0,0, # 58 - 5f - 0,0,0,0,0,0,0,0, # 60 - 67 - 0,0,0,0,0,0,0,0, # 68 - 6f - 0,0,0,0,0,0,0,0, # 70 - 77 - 0,0,0,0,0,0,0,0, # 78 - 7f - 0,0,0,0,0,0,0,0, # 80 - 87 - 0,0,0,0,0,0,0,0, # 88 - 8f - 0,0,0,0,0,0,0,0, # 90 - 97 - 0,0,0,0,0,0,0,0, # 98 - 9f - 0,0,0,0,0,0,0,0, # a0 - a7 - 0,0,0,0,0,0,0,0, # a8 - af - 0,0,0,0,0,0,0,0, # b0 - b7 - 0,0,0,0,0,0,0,0, # b8 - bf - 0,0,0,0,0,0,0,0, # c0 - c7 - 0,0,0,0,0,0,0,0, # c8 - cf - 0,0,0,0,0,0,0,0, # d0 - d7 - 0,0,0,0,0,0,0,0, # d8 - df - 0,0,0,0,0,0,0,0, # e0 - e7 - 0,0,0,0,0,0,0,0, # e8 - ef - 0,0,0,0,0,0,0,0, # f0 - f7 - 0,0,0,0,0,0,4,5 # f8 - ff -) - -UCS2LE_st = ( - 6, 6, 7, 6, 4, 3,eError,eError,#00-07 - eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f - eItsMe,eItsMe, 5, 5, 5,eError,eItsMe,eError,#10-17 - 5, 5, 5,eError, 5,eError, 6, 6,#18-1f - 7, 6, 8, 8, 5, 5, 5,eError,#20-27 - 5, 5, 5,eError,eError,eError, 5, 5,#28-2f - 5, 5, 5,eError, 5,eError,eStart,eStart #30-37 -) - 
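All of the `*SMModel` dicts in this deleted module share one shape: `classTable` maps each of the 256 byte values to a small character class, `stateTable` is a transition matrix flattened row-major with `classFactor` columns, and `charLenTable` records the expected length of the character that a given class starts. A minimal re-implementation of the stepping logic that consumed these models (mirroring charade's `CodingStateMachine`; `eStart`/`eError`/`eItsMe` are 0/1/2 in `charade.constants`):

```python
eStart, eError, eItsMe = 0, 1, 2  # values from charade.constants

def next_state(model, byte_value, cur_state):
    # Classify the byte, then look up the transition for
    # (current state, byte class) in the flattened matrix.
    byte_class = model['classTable'][byte_value]
    return model['stateTable'][cur_state * model['classFactor'] + byte_class]

def looks_like(model, data):
    # True as long as the byte stream never drives the machine into
    # eError; eItsMe is an early positive identification.
    state = eStart
    for b in bytearray(data):
        state = next_state(model, b, state)
        if state == eError:
            return False
        if state == eItsMe:
            return True
    return True
```

For example, `looks_like(Big5SMModel, buf)` rejects any buffer containing a byte sequence that is structurally invalid Big5.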
-UCS2LECharLenTable = (2, 2, 2, 2, 2, 2) - -UCS2LESMModel = {'classTable': UCS2LE_cls, - 'classFactor': 6, - 'stateTable': UCS2LE_st, - 'charLenTable': UCS2LECharLenTable, - 'name': 'UTF-16LE'} - -# UTF-8 - -UTF8_cls = ( - 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 1,1,1,1,1,1,1,1, # 30 - 37 - 1,1,1,1,1,1,1,1, # 38 - 3f - 1,1,1,1,1,1,1,1, # 40 - 47 - 1,1,1,1,1,1,1,1, # 48 - 4f - 1,1,1,1,1,1,1,1, # 50 - 57 - 1,1,1,1,1,1,1,1, # 58 - 5f - 1,1,1,1,1,1,1,1, # 60 - 67 - 1,1,1,1,1,1,1,1, # 68 - 6f - 1,1,1,1,1,1,1,1, # 70 - 77 - 1,1,1,1,1,1,1,1, # 78 - 7f - 2,2,2,2,3,3,3,3, # 80 - 87 - 4,4,4,4,4,4,4,4, # 88 - 8f - 4,4,4,4,4,4,4,4, # 90 - 97 - 4,4,4,4,4,4,4,4, # 98 - 9f - 5,5,5,5,5,5,5,5, # a0 - a7 - 5,5,5,5,5,5,5,5, # a8 - af - 5,5,5,5,5,5,5,5, # b0 - b7 - 5,5,5,5,5,5,5,5, # b8 - bf - 0,0,6,6,6,6,6,6, # c0 - c7 - 6,6,6,6,6,6,6,6, # c8 - cf - 6,6,6,6,6,6,6,6, # d0 - d7 - 6,6,6,6,6,6,6,6, # d8 - df - 7,8,8,8,8,8,8,8, # e0 - e7 - 8,8,8,8,8,9,8,8, # e8 - ef - 10,11,11,11,11,11,11,11, # f0 - f7 - 12,13,13,13,14,15,0,0 # f8 - ff -) - -UTF8_st = ( - eError,eStart,eError,eError,eError,eError, 12, 10,#00-07 - 9, 11, 8, 7, 6, 5, 4, 3,#08-0f - eError,eError,eError,eError,eError,eError,eError,eError,#10-17 - eError,eError,eError,eError,eError,eError,eError,eError,#18-1f - eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,#20-27 - eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,#28-2f - eError,eError, 5, 5, 5, 5,eError,eError,#30-37 - eError,eError,eError,eError,eError,eError,eError,eError,#38-3f - eError,eError,eError, 5, 5, 5,eError,eError,#40-47 - eError,eError,eError,eError,eError,eError,eError,eError,#48-4f - eError,eError, 7, 7, 7, 7,eError,eError,#50-57 - eError,eError,eError,eError,eError,eError,eError,eError,#58-5f - eError,eError,eError,eError, 7, 7,eError,eError,#60-67 - eError,eError,eError,eError,eError,eError,eError,eError,#68-6f - eError,eError, 9, 9, 9, 9,eError,eError,#70-77 - eError,eError,eError,eError,eError,eError,eError,eError,#78-7f - eError,eError,eError,eError,eError, 9,eError,eError,#80-87 - eError,eError,eError,eError,eError,eError,eError,eError,#88-8f - eError,eError, 12, 12, 12, 12,eError,eError,#90-97 - eError,eError,eError,eError,eError,eError,eError,eError,#98-9f - eError,eError,eError,eError,eError, 12,eError,eError,#a0-a7 - eError,eError,eError,eError,eError,eError,eError,eError,#a8-af - eError,eError, 12, 12, 12,eError,eError,eError,#b0-b7 - eError,eError,eError,eError,eError,eError,eError,eError,#b8-bf - eError,eError,eStart,eStart,eStart,eStart,eError,eError,#c0-c7 - eError,eError,eError,eError,eError,eError,eError,eError #c8-cf -) - -UTF8CharLenTable = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6) - -UTF8SMModel = {'classTable': UTF8_cls, - 'classFactor': 16, - 'stateTable': UTF8_st, - 'charLenTable': UTF8CharLenTable, - 'name': 'UTF-8'} - -# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/charade/sbcharsetprober.py b/awx/lib/site-packages/requests/packages/charade/sbcharsetprober.py deleted file mode 100644 index da26715cfc..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/sbcharsetprober.py +++ /dev/null @@ -1,120 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. 
-# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -import sys -from . import constants -from .charsetprober import CharSetProber -from .compat import wrap_ord - -SAMPLE_SIZE = 64 -SB_ENOUGH_REL_THRESHOLD = 1024 -POSITIVE_SHORTCUT_THRESHOLD = 0.95 -NEGATIVE_SHORTCUT_THRESHOLD = 0.05 -SYMBOL_CAT_ORDER = 250 -NUMBER_OF_SEQ_CAT = 4 -POSITIVE_CAT = NUMBER_OF_SEQ_CAT - 1 -#NEGATIVE_CAT = 0 - - -class SingleByteCharSetProber(CharSetProber): - def __init__(self, model, reversed=False, nameProber=None): - CharSetProber.__init__(self) - self._mModel = model - # TRUE if we need to reverse every pair in the model lookup - self._mReversed = reversed - # Optional auxiliary prober for name decision - self._mNameProber = nameProber - self.reset() - - def reset(self): - CharSetProber.reset(self) - # char order of last character - self._mLastOrder = 255 - self._mSeqCounters = [0] * NUMBER_OF_SEQ_CAT - self._mTotalSeqs = 0 - self._mTotalChar = 0 - # characters that fall in our sampling range - self._mFreqChar = 0 - - def get_charset_name(self): - if self._mNameProber: - return self._mNameProber.get_charset_name() - else: - return self._mModel['charsetName'] - - def feed(self, aBuf): - if not self._mModel['keepEnglishLetter']: - aBuf = self.filter_without_english_letters(aBuf) - aLen = len(aBuf) - if not aLen: - return self.get_state() - for c in aBuf: - order = self._mModel['charToOrderMap'][wrap_ord(c)] - if order < SYMBOL_CAT_ORDER: - self._mTotalChar += 1 - if order < SAMPLE_SIZE: - self._mFreqChar += 1 - if self._mLastOrder < SAMPLE_SIZE: - self._mTotalSeqs += 1 - if not self._mReversed: - i = (self._mLastOrder * SAMPLE_SIZE) + order - model = self._mModel['precedenceMatrix'][i] - else: # reverse the order of the letters in the lookup - i = (order * SAMPLE_SIZE) + self._mLastOrder - model = self._mModel['precedenceMatrix'][i] - self._mSeqCounters[model] += 1 - self._mLastOrder = order - - if self.get_state() == constants.eDetecting: - if self._mTotalSeqs > SB_ENOUGH_REL_THRESHOLD: - cf = self.get_confidence() - if cf > POSITIVE_SHORTCUT_THRESHOLD: - if constants._debug: - sys.stderr.write('%s confidence = %s, we have a' - 'winner\n' % - (self._mModel['charsetName'], cf)) - self._mState = constants.eFoundIt - elif cf < NEGATIVE_SHORTCUT_THRESHOLD: - if constants._debug: - sys.stderr.write('%s confidence = %s, below negative' - 'shortcut threshhold %s\n' % - (self._mModel['charsetName'], cf, - NEGATIVE_SHORTCUT_THRESHOLD)) - self._mState = constants.eNotMe - - return self.get_state() - - def get_confidence(self): - r = 0.01 - if self._mTotalSeqs > 0: - r = 
((1.0 * self._mSeqCounters[POSITIVE_CAT]) / self._mTotalSeqs - / self._mModel['mTypicalPositiveRatio']) - r = r * self._mFreqChar / self._mTotalChar - if r >= 1.0: - r = 0.99 - return r diff --git a/awx/lib/site-packages/requests/packages/charade/sbcsgroupprober.py b/awx/lib/site-packages/requests/packages/charade/sbcsgroupprober.py deleted file mode 100644 index b224814568..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/sbcsgroupprober.py +++ /dev/null @@ -1,69 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetgroupprober import CharSetGroupProber -from .sbcharsetprober import SingleByteCharSetProber -from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel, - Latin5CyrillicModel, MacCyrillicModel, - Ibm866Model, Ibm855Model) -from .langgreekmodel import Latin7GreekModel, Win1253GreekModel -from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel -from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel -from .langthaimodel import TIS620ThaiModel -from .langhebrewmodel import Win1255HebrewModel -from .hebrewprober import HebrewProber - - -class SBCSGroupProber(CharSetGroupProber): - def __init__(self): - CharSetGroupProber.__init__(self) - self._mProbers = [ - SingleByteCharSetProber(Win1251CyrillicModel), - SingleByteCharSetProber(Koi8rModel), - SingleByteCharSetProber(Latin5CyrillicModel), - SingleByteCharSetProber(MacCyrillicModel), - SingleByteCharSetProber(Ibm866Model), - SingleByteCharSetProber(Ibm855Model), - SingleByteCharSetProber(Latin7GreekModel), - SingleByteCharSetProber(Win1253GreekModel), - SingleByteCharSetProber(Latin5BulgarianModel), - SingleByteCharSetProber(Win1251BulgarianModel), - SingleByteCharSetProber(Latin2HungarianModel), - SingleByteCharSetProber(Win1250HungarianModel), - SingleByteCharSetProber(TIS620ThaiModel), - ] - hebrewProber = HebrewProber() - logicalHebrewProber = SingleByteCharSetProber(Win1255HebrewModel, - False, hebrewProber) - visualHebrewProber = SingleByteCharSetProber(Win1255HebrewModel, True, - hebrewProber) - hebrewProber.set_model_probers(logicalHebrewProber, visualHebrewProber) - self._mProbers.extend([hebrewProber, logicalHebrewProber, - visualHebrewProber]) - - self.reset() diff --git a/awx/lib/site-packages/requests/packages/charade/sjisprober.py 
b/awx/lib/site-packages/requests/packages/charade/sjisprober.py deleted file mode 100644 index 9bb0cdcf1f..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/sjisprober.py +++ /dev/null @@ -1,91 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -import sys -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import SJISDistributionAnalysis -from .jpcntx import SJISContextAnalysis -from .mbcssm import SJISSMModel -from . import constants - - -class SJISProber(MultiByteCharSetProber): - def __init__(self): - MultiByteCharSetProber.__init__(self) - self._mCodingSM = CodingStateMachine(SJISSMModel) - self._mDistributionAnalyzer = SJISDistributionAnalysis() - self._mContextAnalyzer = SJISContextAnalysis() - self.reset() - - def reset(self): - MultiByteCharSetProber.reset(self) - self._mContextAnalyzer.reset() - - def get_charset_name(self): - return "SHIFT_JIS" - - def feed(self, aBuf): - aLen = len(aBuf) - for i in range(0, aLen): - codingState = self._mCodingSM.next_state(aBuf[i]) - if codingState == constants.eError: - if constants._debug: - sys.stderr.write(self.get_charset_name() - + ' prober hit error at byte ' + str(i) - + '\n') - self._mState = constants.eNotMe - break - elif codingState == constants.eItsMe: - self._mState = constants.eFoundIt - break - elif codingState == constants.eStart: - charLen = self._mCodingSM.get_current_charlen() - if i == 0: - self._mLastChar[1] = aBuf[0] - self._mContextAnalyzer.feed(self._mLastChar[2 - charLen:], - charLen) - self._mDistributionAnalyzer.feed(self._mLastChar, charLen) - else: - self._mContextAnalyzer.feed(aBuf[i + 1 - charLen:i + 3 - - charLen], charLen) - self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1], - charLen) - - self._mLastChar[0] = aBuf[aLen - 1] - - if self.get_state() == constants.eDetecting: - if (self._mContextAnalyzer.got_enough_data() and - (self.get_confidence() > constants.SHORTCUT_THRESHOLD)): - self._mState = constants.eFoundIt - - return self.get_state() - - def get_confidence(self): - contxtCf = self._mContextAnalyzer.get_confidence() - distribCf = self._mDistributionAnalyzer.get_confidence() - return max(contxtCf, distribCf) diff --git a/awx/lib/site-packages/requests/packages/charade/universaldetector.py b/awx/lib/site-packages/requests/packages/charade/universaldetector.py deleted file mode 100644 index 
6a8e68a8a7..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/universaldetector.py +++ /dev/null @@ -1,168 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from . import constants -import sys -import codecs -from .latin1prober import Latin1Prober # windows-1252 -from .mbcsgroupprober import MBCSGroupProber # multi-byte character sets -from .sbcsgroupprober import SBCSGroupProber # single-byte character sets -from .escprober import EscCharSetProber # ISO-2122, etc. -import re - -MINIMUM_THRESHOLD = 0.20 -ePureAscii = 0 -eEscAscii = 1 -eHighbyte = 2 - - -class UniversalDetector: - def __init__(self): - self._highBitDetector = re.compile(b'[\x80-\xFF]') - self._escDetector = re.compile(b'(\033|~{)') - self._mEscCharSetProber = None - self._mCharSetProbers = [] - self.reset() - - def reset(self): - self.result = {'encoding': None, 'confidence': 0.0} - self.done = False - self._mStart = True - self._mGotData = False - self._mInputState = ePureAscii - self._mLastChar = b'' - if self._mEscCharSetProber: - self._mEscCharSetProber.reset() - for prober in self._mCharSetProbers: - prober.reset() - - def feed(self, aBuf): - if self.done: - return - - aLen = len(aBuf) - if not aLen: - return - - if not self._mGotData: - # If the data starts with BOM, we know it is UTF - if aBuf[:3] == codecs.BOM: - # EF BB BF UTF-8 with BOM - self.result = {'encoding': "UTF-8", 'confidence': 1.0} - elif aBuf[:4] in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE): - # FF FE 00 00 UTF-32, little-endian BOM - # 00 00 FE FF UTF-32, big-endian BOM - self.result = {'encoding': "UTF-32", 'confidence': 1.0} - elif aBuf[:4] == b'\xFE\xFF\x00\x00': - # FE FF 00 00 UCS-4, unusual octet order BOM (3412) - self.result = { - 'encoding': "X-ISO-10646-UCS-4-3412", - 'confidence': 1.0 - } - elif aBuf[:4] == b'\x00\x00\xFF\xFE': - # 00 00 FF FE UCS-4, unusual octet order BOM (2143) - self.result = { - 'encoding': "X-ISO-10646-UCS-4-2143", - 'confidence': 1.0 - } - elif aBuf[:2] == codecs.BOM_LE or aBuf[:2] == codecs.BOM_BE: - # FF FE UTF-16, little endian BOM - # FE FF UTF-16, big endian BOM - self.result = {'encoding': "UTF-16", 'confidence': 1.0} - - self._mGotData = True - if self.result['encoding'] and (self.result['confidence'] > 0.0): - self.done = True - return - - if self._mInputState == ePureAscii: - if self._highBitDetector.search(aBuf): 
- self._mInputState = eHighbyte - elif ((self._mInputState == ePureAscii) and - self._escDetector.search(self._mLastChar + aBuf)): - self._mInputState = eEscAscii - - self._mLastChar = aBuf[-1:] - - if self._mInputState == eEscAscii: - if not self._mEscCharSetProber: - self._mEscCharSetProber = EscCharSetProber() - if self._mEscCharSetProber.feed(aBuf) == constants.eFoundIt: - self.result = { - 'encoding': self._mEscCharSetProber.get_charset_name(), - 'confidence': self._mEscCharSetProber.get_confidence() - } - self.done = True - elif self._mInputState == eHighbyte: - if not self._mCharSetProbers: - self._mCharSetProbers = [MBCSGroupProber(), SBCSGroupProber(), - Latin1Prober()] - for prober in self._mCharSetProbers: - if prober.feed(aBuf) == constants.eFoundIt: - self.result = {'encoding': prober.get_charset_name(), - 'confidence': prober.get_confidence()} - self.done = True - break - - def close(self): - if self.done: - return - if not self._mGotData: - if constants._debug: - sys.stderr.write('no data received!\n') - return - self.done = True - - if self._mInputState == ePureAscii: - self.result = {'encoding': 'ascii', 'confidence': 1.0} - return self.result - - if self._mInputState == eHighbyte: - proberConfidence = None - maxProberConfidence = 0.0 - maxProber = None - for prober in self._mCharSetProbers: - if not prober: - continue - proberConfidence = prober.get_confidence() - if proberConfidence > maxProberConfidence: - maxProberConfidence = proberConfidence - maxProber = prober - if maxProber and (maxProberConfidence > MINIMUM_THRESHOLD): - self.result = {'encoding': maxProber.get_charset_name(), - 'confidence': maxProber.get_confidence()} - return self.result - - if constants._debug: - sys.stderr.write('no probers hit minimum threshhold\n') - for prober in self._mCharSetProbers[0].mProbers: - if not prober: - continue - sys.stderr.write('%s confidence = %s\n' % - (prober.get_charset_name(), - prober.get_confidence())) diff --git a/awx/lib/site-packages/requests/packages/charade/utf8prober.py b/awx/lib/site-packages/requests/packages/charade/utf8prober.py deleted file mode 100644 index 72c8d3d6a9..0000000000 --- a/awx/lib/site-packages/requests/packages/charade/utf8prober.py +++ /dev/null @@ -1,76 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from . 
import constants -from .charsetprober import CharSetProber -from .codingstatemachine import CodingStateMachine -from .mbcssm import UTF8SMModel - -ONE_CHAR_PROB = 0.5 - - -class UTF8Prober(CharSetProber): - def __init__(self): - CharSetProber.__init__(self) - self._mCodingSM = CodingStateMachine(UTF8SMModel) - self.reset() - - def reset(self): - CharSetProber.reset(self) - self._mCodingSM.reset() - self._mNumOfMBChar = 0 - - def get_charset_name(self): - return "utf-8" - - def feed(self, aBuf): - for c in aBuf: - codingState = self._mCodingSM.next_state(c) - if codingState == constants.eError: - self._mState = constants.eNotMe - break - elif codingState == constants.eItsMe: - self._mState = constants.eFoundIt - break - elif codingState == constants.eStart: - if self._mCodingSM.get_current_charlen() >= 2: - self._mNumOfMBChar += 1 - - if self.get_state() == constants.eDetecting: - if self.get_confidence() > constants.SHORTCUT_THRESHOLD: - self._mState = constants.eFoundIt - - return self.get_state() - - def get_confidence(self): - unlike = 0.99 - if self._mNumOfMBChar < 6: - for i in range(0, self._mNumOfMBChar): - unlike = unlike * ONE_CHAR_PROB - return 1.0 - unlike - else: - return unlike diff --git a/awx/lib/site-packages/requests/packages/chardet/__init__.py b/awx/lib/site-packages/requests/packages/chardet/__init__.py index e4f0799d62..82c2a48d29 100644 --- a/awx/lib/site-packages/requests/packages/chardet/__init__.py +++ b/awx/lib/site-packages/requests/packages/chardet/__init__.py @@ -15,7 +15,7 @@ # 02110-1301 USA ######################### END LICENSE BLOCK ######################### -__version__ = "2.2.1" +__version__ = "2.3.0" from sys import version_info diff --git a/awx/lib/site-packages/requests/packages/chardet/chardetect.py b/awx/lib/site-packages/requests/packages/chardet/chardetect.py old mode 100644 new mode 100755 index ecd0163be7..ffe892f25d --- a/awx/lib/site-packages/requests/packages/chardet/chardetect.py +++ b/awx/lib/site-packages/requests/packages/chardet/chardetect.py @@ -12,34 +12,68 @@ Example:: If no paths are provided, it takes its input from stdin. """ -from io import open -from sys import argv, stdin +from __future__ import absolute_import, print_function, unicode_literals + +import argparse +import sys +from io import open + +from chardet import __version__ from chardet.universaldetector import UniversalDetector -def description_of(file, name='stdin'): - """Return a string describing the probable encoding of a file.""" +def description_of(lines, name='stdin'): + """ + Return a string describing the probable encoding of a file or + list of strings. + + :param lines: The lines to get the encoding of. + :type lines: Iterable of bytes + :param name: Name of file or collection of lines + :type name: str + """ u = UniversalDetector() - for line in file: + for line in lines: u.feed(line) u.close() result = u.result if result['encoding']: - return '%s: %s with confidence %s' % (name, - result['encoding'], - result['confidence']) + return '{0}: {1} with confidence {2}'.format(name, result['encoding'], + result['confidence']) else: - return '%s: no result' % name + return '{0}: no result'.format(name) -def main(): - if len(argv) <= 1: - print(description_of(stdin)) - else: - for path in argv[1:]: - with open(path, 'rb') as f: - print(description_of(f, path)) +def main(argv=None): + ''' + Handles command line arguments and gets things started. + + :param argv: List of arguments, as if specified on the command-line. + If None, ``sys.argv[1:]`` is used instead. 
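The rewritten `description_of` above now accepts any iterable of byte strings rather than an open file object, which makes it usable programmatically as well as from the CLI. A short usage sketch (the file name is a placeholder; the function and module names are as in the hunk above):

```python
from chardet.chardetect import description_of

# From an open binary file, exactly as the CLI does:
with open('sample.txt', 'rb') as f:
    print(description_of(f, f.name))

# Or from an in-memory list of byte lines:
print(description_of([b'caf\xc3\xa9 au lait\n'], name='<memory>'))
```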
+ :type argv: list of str + ''' + # Get command line arguments + parser = argparse.ArgumentParser( + description="Takes one or more file paths and reports their detected \ + encodings", + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + conflict_handler='resolve') + parser.add_argument('input', + help='File whose encoding we would like to determine.', + type=argparse.FileType('rb'), nargs='*', + default=[sys.stdin]) + parser.add_argument('--version', action='version', + version='%(prog)s {0}'.format(__version__)) + args = parser.parse_args(argv) + + for f in args.input: + if f.isatty(): + print("You are running chardetect interactively. Press " + + "CTRL-D twice at the start of a blank line to signal the " + + "end of your input. If you want help, run chardetect " + + "--help\n", file=sys.stderr) + print(description_of(f, f.name)) if __name__ == '__main__': diff --git a/awx/lib/site-packages/requests/packages/chardet/jpcntx.py b/awx/lib/site-packages/requests/packages/chardet/jpcntx.py index f7f69ba4cd..59aeb6a878 100644 --- a/awx/lib/site-packages/requests/packages/chardet/jpcntx.py +++ b/awx/lib/site-packages/requests/packages/chardet/jpcntx.py @@ -177,6 +177,12 @@ class JapaneseContextAnalysis: return -1, 1 class SJISContextAnalysis(JapaneseContextAnalysis): + def __init__(self): + self.charset_name = "SHIFT_JIS" + + def get_charset_name(self): + return self.charset_name + def get_order(self, aBuf): if not aBuf: return -1, 1 @@ -184,6 +190,8 @@ class SJISContextAnalysis(JapaneseContextAnalysis): first_char = wrap_ord(aBuf[0]) if ((0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC)): charLen = 2 + if (first_char == 0x87) or (0xFA <= first_char <= 0xFC): + self.charset_name = "CP932" else: charLen = 1 diff --git a/awx/lib/site-packages/requests/packages/chardet/latin1prober.py b/awx/lib/site-packages/requests/packages/chardet/latin1prober.py index ad695f57a7..eef3573543 100644 --- a/awx/lib/site-packages/requests/packages/chardet/latin1prober.py +++ b/awx/lib/site-packages/requests/packages/chardet/latin1prober.py @@ -129,11 +129,11 @@ class Latin1Prober(CharSetProber): if total < 0.01: confidence = 0.0 else: - confidence = ((self._mFreqCounter[3] / total) - - (self._mFreqCounter[1] * 20.0 / total)) + confidence = ((self._mFreqCounter[3] - self._mFreqCounter[1] * 20.0) + / total) if confidence < 0.0: confidence = 0.0 # lower the confidence of latin1 so that other more accurate # detector can take priority. 
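The `jpcntx.py` hunk above is the behavioural change worth pausing on: `SJISContextAnalysis` starts out reporting SHIFT_JIS but upgrades itself to CP932 as soon as it sees a lead byte that only the Windows code page uses (0x87 for the NEC special rows, 0xFA-0xFC for the IBM extension rows). Restated as a standalone check (a hypothetical helper, not the library's API); the companion `latin1prober` tweak, raising the damping factor from 0.5 to 0.73, follows immediately below:

```python
def sjis_variant(lead_byte):
    # Mirrors the condition added to SJISContextAnalysis.get_order():
    # these lead bytes are only defined in CP932's extension rows.
    if lead_byte == 0x87 or 0xFA <= lead_byte <= 0xFC:
        return "CP932"
    return "SHIFT_JIS"
```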
- confidence = confidence * 0.5 + confidence = confidence * 0.73 return confidence diff --git a/awx/lib/site-packages/requests/packages/chardet/mbcssm.py b/awx/lib/site-packages/requests/packages/chardet/mbcssm.py index 3f93cfb045..efe678ca03 100644 --- a/awx/lib/site-packages/requests/packages/chardet/mbcssm.py +++ b/awx/lib/site-packages/requests/packages/chardet/mbcssm.py @@ -353,7 +353,7 @@ SJIS_cls = ( 2,2,2,2,2,2,2,2, # 68 - 6f 2,2,2,2,2,2,2,2, # 70 - 77 2,2,2,2,2,2,2,1, # 78 - 7f - 3,3,3,3,3,3,3,3, # 80 - 87 + 3,3,3,3,3,2,2,3, # 80 - 87 3,3,3,3,3,3,3,3, # 88 - 8f 3,3,3,3,3,3,3,3, # 90 - 97 3,3,3,3,3,3,3,3, # 98 - 9f @@ -369,9 +369,8 @@ SJIS_cls = ( 2,2,2,2,2,2,2,2, # d8 - df 3,3,3,3,3,3,3,3, # e0 - e7 3,3,3,3,3,4,4,4, # e8 - ef - 4,4,4,4,4,4,4,4, # f0 - f7 - 4,4,4,4,4,0,0,0 # f8 - ff -) + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,0,0,0) # f8 - ff SJIS_st = ( @@ -571,5 +570,3 @@ UTF8SMModel = {'classTable': UTF8_cls, 'stateTable': UTF8_st, 'charLenTable': UTF8CharLenTable, 'name': 'UTF-8'} - -# flake8: noqa diff --git a/awx/lib/site-packages/requests/packages/chardet/sjisprober.py b/awx/lib/site-packages/requests/packages/chardet/sjisprober.py index b173614e68..cd0e9e7078 100644 --- a/awx/lib/site-packages/requests/packages/chardet/sjisprober.py +++ b/awx/lib/site-packages/requests/packages/chardet/sjisprober.py @@ -47,7 +47,7 @@ class SJISProber(MultiByteCharSetProber): self._mContextAnalyzer.reset() def get_charset_name(self): - return "SHIFT_JIS" + return self._mContextAnalyzer.get_charset_name() def feed(self, aBuf): aLen = len(aBuf) diff --git a/awx/lib/site-packages/requests/packages/chardet/universaldetector.py b/awx/lib/site-packages/requests/packages/chardet/universaldetector.py index 9a03ad3d89..476522b999 100644 --- a/awx/lib/site-packages/requests/packages/chardet/universaldetector.py +++ b/awx/lib/site-packages/requests/packages/chardet/universaldetector.py @@ -71,9 +71,9 @@ class UniversalDetector: if not self._mGotData: # If the data starts with BOM, we know it is UTF - if aBuf[:3] == codecs.BOM: + if aBuf[:3] == codecs.BOM_UTF8: # EF BB BF UTF-8 with BOM - self.result = {'encoding': "UTF-8", 'confidence': 1.0} + self.result = {'encoding': "UTF-8-SIG", 'confidence': 1.0} elif aBuf[:4] == codecs.BOM_UTF32_LE: # FF FE 00 00 UTF-32, little-endian BOM self.result = {'encoding': "UTF-32LE", 'confidence': 1.0} diff --git a/awx/lib/site-packages/requests/packages/urllib3/__init__.py b/awx/lib/site-packages/requests/packages/urllib3/__init__.py index 73071f7001..dfc82d0336 100644 --- a/awx/lib/site-packages/requests/packages/urllib3/__init__.py +++ b/awx/lib/site-packages/requests/packages/urllib3/__init__.py @@ -1,9 +1,3 @@ -# urllib3/__init__.py -# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -# -# This module is part of urllib3 and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - """ urllib3 - Thread-safe connection pooling and re-using. """ @@ -23,7 +17,10 @@ from . import exceptions from .filepost import encode_multipart_formdata from .poolmanager import PoolManager, ProxyManager, proxy_from_url from .response import HTTPResponse -from .util import make_headers, get_host, Timeout +from .util.request import make_headers +from .util.url import get_host +from .util.timeout import Timeout +from .util.retry import Retry # Set default logging handler to avoid "No handler found" warnings. 
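Note the import churn at the top of `urllib3/__init__.py`: the old monolithic `urllib3.util` module has been split into a package, so `make_headers`, `get_host`, `Timeout`, and the newly introduced `Retry` each live in a dedicated submodule. The package's `util/__init__` should keep the flat imports working, but the canonical paths are now the explicit ones. A quick sketch of the new-style imports:

```python
from urllib3.util.url import get_host
from urllib3.util.timeout import Timeout

print(get_host('http://example.com:8080/mail'))  # ('http', 'example.com', 8080)

# A granular timeout, usable as a pool default or per request:
timeout = Timeout(connect=2.0, read=7.0)
```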
@@ -51,8 +48,19 @@ def add_stderr_logger(level=logging.DEBUG): handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s')) logger.addHandler(handler) logger.setLevel(level) - logger.debug('Added an stderr logging handler to logger: %s' % __name__) + logger.debug('Added a stderr logging handler to logger: %s' % __name__) return handler # ... Clean up. del NullHandler + + +# Set security warning to only go off once by default. +import warnings +warnings.simplefilter('always', exceptions.SecurityWarning) + +def disable_warnings(category=exceptions.HTTPWarning): + """ + Helper for quickly disabling all urllib3 warnings. + """ + warnings.simplefilter('ignore', category) diff --git a/awx/lib/site-packages/requests/packages/urllib3/_collections.py b/awx/lib/site-packages/requests/packages/urllib3/_collections.py index 9cea3a44c4..784342a4eb 100644 --- a/awx/lib/site-packages/requests/packages/urllib3/_collections.py +++ b/awx/lib/site-packages/requests/packages/urllib3/_collections.py @@ -1,9 +1,3 @@ -# urllib3/_collections.py -# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -# -# This module is part of urllib3 and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - from collections import Mapping, MutableMapping try: from threading import RLock @@ -20,7 +14,7 @@ try: # Python 2.7+ from collections import OrderedDict except ImportError: from .packages.ordered_dict import OrderedDict -from .packages.six import itervalues +from .packages.six import iterkeys, itervalues __all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict'] @@ -91,8 +85,7 @@ class RecentlyUsedContainer(MutableMapping): def clear(self): with self.lock: # Copy pointers to all values, then wipe the mapping - # under Python 2, this copies the list of values twice :-| - values = list(self._container.values()) + values = list(itervalues(self._container)) self._container.clear() if self.dispose_func: @@ -101,7 +94,7 @@ class RecentlyUsedContainer(MutableMapping): def keys(self): with self.lock: - return self._container.keys() + return list(iterkeys(self._container)) class HTTPHeaderDict(MutableMapping): @@ -116,7 +109,7 @@ class HTTPHeaderDict(MutableMapping): A ``dict`` like container for storing HTTP Headers. Field names are stored and compared case-insensitively in compliance with - RFC 2616. Iteration provides the first case-sensitive key seen for each + RFC 7230. Iteration provides the first case-sensitive key seen for each case-insensitive pair. 
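Beyond the `_collections` cleanups, the `__init__.py` hunk above adds a user-visible knob: `SecurityWarning`s are force-enabled via a `simplefilter`, and the new `disable_warnings()` helper silences urllib3 warnings wholesale. Typical use (the `InsecureRequestWarning` category comes from `urllib3.exceptions`, which the connectionpool hunk further down imports):

```python
import urllib3
from urllib3.exceptions import InsecureRequestWarning

# Silence every urllib3 HTTPWarning subclass...
urllib3.disable_warnings()

# ...or only the category you have consciously decided to accept:
urllib3.disable_warnings(InsecureRequestWarning)
```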
Using ``__setitem__`` syntax overwrites fields that compare equal diff --git a/awx/lib/site-packages/requests/packages/urllib3/connection.py b/awx/lib/site-packages/requests/packages/urllib3/connection.py index 5feb3322ee..e5de769d8c 100644 --- a/awx/lib/site-packages/requests/packages/urllib3/connection.py +++ b/awx/lib/site-packages/requests/packages/urllib3/connection.py @@ -1,95 +1,142 @@ -# urllib3/connection.py -# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -# -# This module is part of urllib3 and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - +import datetime import sys import socket from socket import timeout as SocketTimeout +import warnings +from .packages import six -try: # Python 3 +try: # Python 3 from http.client import HTTPConnection as _HTTPConnection, HTTPException except ImportError: from httplib import HTTPConnection as _HTTPConnection, HTTPException + class DummyConnection(object): "Used to detect a failed ConnectionCls import." pass -try: # Compiled with SSL? - ssl = None + +try: # Compiled with SSL? HTTPSConnection = DummyConnection + import ssl + BaseSSLError = ssl.SSLError +except (ImportError, AttributeError): # Platform-specific: No SSL. + ssl = None class BaseSSLError(BaseException): pass - try: # Python 3 - from http.client import HTTPSConnection as _HTTPSConnection - except ImportError: - from httplib import HTTPSConnection as _HTTPSConnection - import ssl - BaseSSLError = ssl.SSLError +try: # Python 3: + # Not a no-op, we're adding this to the namespace so it can be imported. + ConnectionError = ConnectionError +except NameError: # Python 2: + class ConnectionError(Exception): + pass -except (ImportError, AttributeError): # Platform-specific: No SSL. - pass from .exceptions import ( ConnectTimeoutError, + SystemTimeWarning, + SecurityWarning, ) from .packages.ssl_match_hostname import match_hostname -from .packages import six -from .util import ( - assert_fingerprint, + +from .util.ssl_ import ( resolve_cert_reqs, resolve_ssl_version, ssl_wrap_socket, + assert_fingerprint, ) +from .util import connection + port_by_scheme = { 'http': 80, 'https': 443, } +RECENT_DATE = datetime.date(2014, 1, 1) + class HTTPConnection(_HTTPConnection, object): """ Based on httplib.HTTPConnection but provides an extra constructor backwards-compatibility layer between older and newer Pythons. + + Additional keyword parameters are used to configure attributes of the connection. + Accepted parameters include: + + - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool` + - ``source_address``: Set the source address for the current connection. + + .. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x + + - ``socket_options``: Set specific options on the underlying socket. If not specified, then + defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling + Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy. + + For example, if you wish to enable TCP Keep Alive in addition to the defaults, + you might pass:: + + HTTPConnection.default_socket_options + [ + (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), + ] + + Or you may want to disable the defaults by passing an empty list (e.g., ``[]``). """ default_port = port_by_scheme['http'] - # By default, disable Nagle's Algorithm. - tcp_nodelay = 1 + #: Disable Nagle's algorithm by default. 
+ #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]`` + default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)] + + #: Whether this connection verifies the host's certificate. + is_verified = False def __init__(self, *args, **kw): if six.PY3: # Python 3 kw.pop('strict', None) - if sys.version_info < (2, 7): # Python 2.6 and older - kw.pop('source_address', None) # Pre-set source_address in case we have an older Python like 2.6. self.source_address = kw.get('source_address') + if sys.version_info < (2, 7): # Python 2.6 + # _HTTPConnection on Python 2.6 will balk at this keyword arg, but + # not newer versions. We can still use it when creating a + # connection though, so we pop it *after* we have saved it as + # self.source_address. + kw.pop('source_address', None) + + #: The socket options provided by the user. If no options are + #: provided, we use the default options. + self.socket_options = kw.pop('socket_options', self.default_socket_options) + # Superclass also sets self.source_address in Python 2.7+. - _HTTPConnection.__init__(self, *args, **kw) + _HTTPConnection.__init__(self, *args, **kw) def _new_conn(self): """ Establish a socket connection and set nodelay settings on it. - :return: a new socket connection + :return: New socket connection. """ - extra_args = [] - if self.source_address: # Python 2.7+ - extra_args.append(self.source_address) + extra_kw = {} + if self.source_address: + extra_kw['source_address'] = self.source_address - conn = socket.create_connection( - (self.host, self.port), self.timeout, *extra_args) - conn.setsockopt( - socket.IPPROTO_TCP, socket.TCP_NODELAY, self.tcp_nodelay) + if self.socket_options: + extra_kw['socket_options'] = self.socket_options + + try: + conn = connection.create_connection( + (self.host, self.port), self.timeout, **extra_kw) + + except SocketTimeout: + raise ConnectTimeoutError( + self, "Connection to %s timed out. (connect timeout=%s)" % + (self.host, self.timeout)) return conn @@ -101,6 +148,8 @@ class HTTPConnection(_HTTPConnection, object): if getattr(self, '_tunnel_host', None): # TODO: Fix tunnel so it doesn't depend on self.sock state. self._tunnel() + # Mark this connection as not reusable + self.auto_open = 0 def connect(self): conn = self._new_conn() @@ -137,7 +186,7 @@ class VerifiedHTTPSConnection(HTTPSConnection): cert_reqs = None ca_certs = None ssl_version = None - conn_kw = {} + assert_fingerprint = None def set_cert(self, key_file=None, cert_file=None, cert_reqs=None, ca_certs=None, @@ -152,18 +201,7 @@ class VerifiedHTTPSConnection(HTTPSConnection): def connect(self): # Add certificate verification - - try: - sock = socket.create_connection( - address=(self.host, self.port), timeout=self.timeout, - **self.conn_kw) - except SocketTimeout: - raise ConnectTimeoutError( - self, "Connection to %s timed out. (connect timeout=%s)" % - (self.host, self.timeout)) - - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, - self.tcp_nodelay) + conn = self._new_conn() resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs) resolved_ssl_version = resolve_ssl_version(self.ssl_version) @@ -173,29 +211,49 @@ class VerifiedHTTPSConnection(HTTPSConnection): # _tunnel_host was added in Python 2.6.3 # (See: http://hg.python.org/cpython/rev/0f57b30a152f) - self.sock = sock + self.sock = conn # Calls self._set_hostport(), so self.host is # self._tunnel_host below. self._tunnel() + # Mark this connection as not reusable + self.auto_open = 0 # Override the host with the one we're requesting data from. 
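The `socket_options` plumbing above replaces the old `tcp_nodelay` attribute with a list of `setsockopt` tuples that travels from a pool's `**conn_kw` down into `util.connection.create_connection`. Following the recipe in the new class docstring, enabling TCP keep-alive on top of the Nagle-disabling default would look like this (a sketch against the public pool API; the host is a placeholder):

```python
import socket
from urllib3 import HTTPConnectionPool
from urllib3.connection import HTTPConnection

# Extend the defaults rather than replacing them, as the docstring advises.
keepalive_options = HTTPConnection.default_socket_options + [
    (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
]

pool = HTTPConnectionPool('example.com', maxsize=2,
                          socket_options=keepalive_options)
```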
hostname = self._tunnel_host + is_time_off = datetime.date.today() < RECENT_DATE + if is_time_off: + warnings.warn(( + 'System time is way off (before {0}). This will probably ' + 'lead to SSL verification errors').format(RECENT_DATE), + SystemTimeWarning + ) + # Wrap socket using verification with the root certs in # trusted_root_certs - self.sock = ssl_wrap_socket(sock, self.key_file, self.cert_file, + self.sock = ssl_wrap_socket(conn, self.key_file, self.cert_file, cert_reqs=resolved_cert_reqs, ca_certs=self.ca_certs, server_hostname=hostname, ssl_version=resolved_ssl_version) - if resolved_cert_reqs != ssl.CERT_NONE: - if self.assert_fingerprint: - assert_fingerprint(self.sock.getpeercert(binary_form=True), - self.assert_fingerprint) - elif self.assert_hostname is not False: - match_hostname(self.sock.getpeercert(), - self.assert_hostname or hostname) + if self.assert_fingerprint: + assert_fingerprint(self.sock.getpeercert(binary_form=True), + self.assert_fingerprint) + elif resolved_cert_reqs != ssl.CERT_NONE \ + and self.assert_hostname is not False: + cert = self.sock.getpeercert() + if not cert.get('subjectAltName', ()): + warnings.warn(( + 'Certificate has no `subjectAltName`, falling back to check for a `commonName` for now. ' + 'This feature is being removed by major browsers and deprecated by RFC 2818. ' + '(See https://github.com/shazow/urllib3/issues/497 for details.)'), + SecurityWarning + ) + match_hostname(cert, self.assert_hostname or hostname) + + self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED + or self.assert_fingerprint is not None) if ssl: diff --git a/awx/lib/site-packages/requests/packages/urllib3/connectionpool.py b/awx/lib/site-packages/requests/packages/urllib3/connectionpool.py index 95a53a7df6..70ee4eed5e 100644 --- a/awx/lib/site-packages/requests/packages/urllib3/connectionpool.py +++ b/awx/lib/site-packages/requests/packages/urllib3/connectionpool.py @@ -1,17 +1,12 @@ -# urllib3/connectionpool.py -# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -# -# This module is part of urllib3 and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - -import sys import errno import logging +import sys +import warnings from socket import error as SocketError, timeout as SocketTimeout import socket -try: # Python 3 +try: # Python 3 from queue import LifoQueue, Empty, Full except ImportError: from Queue import LifoQueue, Empty, Full @@ -20,16 +15,16 @@ except ImportError: from .exceptions import ( ClosedPoolError, - ConnectionError, - ConnectTimeoutError, + ProtocolError, EmptyPoolError, HostChangedError, - LocationParseError, + LocationValueError, MaxRetryError, + ProxyError, + ReadTimeoutError, SSLError, TimeoutError, - ReadTimeoutError, - ProxyError, + InsecureRequestWarning, ) from .packages.ssl_match_hostname import CertificateError from .packages import six @@ -37,15 +32,15 @@ from .connection import ( port_by_scheme, DummyConnection, HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection, - HTTPException, BaseSSLError, + HTTPException, BaseSSLError, ConnectionError ) from .request import RequestMethods from .response import HTTPResponse -from .util import ( - get_host, - is_connection_dropped, - Timeout, -) + +from .util.connection import is_connection_dropped +from .util.retry import Retry +from .util.timeout import Timeout +from .util.url import get_host xrange = six.moves.xrange @@ -54,8 +49,8 @@ log = logging.getLogger(__name__) _Default = object() -## Pool objects +## Pool objects 
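One caller-facing note on the verification rewrite above before the pool classes below: connections now record an `is_verified` flag, and a certificate without a `subjectAltName` triggers a `SecurityWarning` instead of silently matching on `commonName`. Opting in to full verification is unchanged from the user's side (a sketch; the CA bundle path is an assumption and varies by platform):

```python
import urllib3

http = urllib3.PoolManager(
    cert_reqs='CERT_REQUIRED',                      # verify the certificate chain
    ca_certs='/etc/ssl/certs/ca-certificates.crt',  # assumed Debian-style bundle path
)
r = http.request('GET', 'https://example.com/')
print(r.status)
```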
class ConnectionPool(object): """ Base class for all connection pools, such as @@ -66,13 +61,11 @@ class ConnectionPool(object): QueueCls = LifoQueue def __init__(self, host, port=None): - if host is None: - raise LocationParseError(host) + if not host: + raise LocationValueError("No host specified.") # httplib doesn't like it when we include brackets in ipv6 addresses - host = host.strip('[]') - - self.host = host + self.host = host.strip('[]') self.port = port def __str__(self): @@ -82,6 +75,7 @@ class ConnectionPool(object): # This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252 _blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK]) + class HTTPConnectionPool(ConnectionPool, RequestMethods): """ Thread-safe connection pool for one host. @@ -126,6 +120,9 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): Headers to include with all requests, unless other headers are given explicitly. + :param retries: + Retry configuration to use by default with requests in this pool. + :param _proxy: Parsed proxy URL, should not be used directly, instead, see :class:`urllib3.connectionpool.ProxyManager`" @@ -133,6 +130,10 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): :param _proxy_headers: A dictionary with proxy headers, should not be used directly, instead, see :class:`urllib3.connectionpool.ProxyManager`" + + :param \**conn_kw: + Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`, + :class:`urllib3.connection.HTTPSConnection` instances. """ scheme = 'http' @@ -140,18 +141,22 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): def __init__(self, host, port=None, strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False, - headers=None, _proxy=None, _proxy_headers=None, **conn_kw): + headers=None, retries=None, + _proxy=None, _proxy_headers=None, + **conn_kw): ConnectionPool.__init__(self, host, port) RequestMethods.__init__(self, headers) self.strict = strict - # This is for backwards compatibility and can be removed once a timeout - # can only be set to a Timeout object if not isinstance(timeout, Timeout): timeout = Timeout.from_float(timeout) + if retries is None: + retries = Retry.DEFAULT + self.timeout = timeout + self.retries = retries self.pool = self.QueueCls(maxsize) self.block = block @@ -166,11 +171,14 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): # These are mostly for testing and debugging purposes. self.num_connections = 0 self.num_requests = 0 - - if sys.version_info < (2, 7): # Python 2.6 and older - conn_kw.pop('source_address', None) self.conn_kw = conn_kw + if self.proxy: + # Enable Nagle's algorithm for proxies, to avoid packet fragmentation. + # We cannot know if the user has added default socket options, so we cannot replace the + # list. + self.conn_kw.setdefault('socket_options', []) + def _new_conn(self): """ Return a fresh :class:`HTTPConnection`. @@ -182,10 +190,6 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): conn = self.ConnectionCls(host=self.host, port=self.port, timeout=self.timeout.connect_timeout, strict=self.strict, **self.conn_kw) - if self.proxy is not None: - # Enable Nagle's algorithm for proxies, to avoid packet - # fragmentation. 
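With the `retries` constructor parameter documented above, a pool can now carry a default `Retry` policy (falling back to `Retry.DEFAULT` when none is given) instead of the old hard-coded integer, and individual `urlopen()` calls can still override it. For instance (a sketch; host and limits are placeholders):

```python
from urllib3 import HTTPConnectionPool
from urllib3.util.retry import Retry

pool = HTTPConnectionPool(
    'example.com', maxsize=4,
    retries=Retry(total=5, redirect=2, backoff_factor=0.3),
)
```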
- conn.tcp_nodelay = 0 return conn def _get_conn(self, timeout=None): @@ -204,7 +208,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): try: conn = self.pool.get(block=self.block, timeout=timeout) - except AttributeError: # self.pool is None + except AttributeError: # self.pool is None raise ClosedPoolError(self, "Pool is closed.") except Empty: @@ -218,6 +222,11 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): if conn and is_connection_dropped(conn): log.info("Resetting dropped connection: %s" % self.host) conn.close() + if getattr(conn, 'auto_open', 1) == 0: + # This is a proxied connection that has been mutated by + # httplib._tunnel() and cannot be reused (since it would + # attempt to bypass the proxy) + conn = None return conn or self._new_conn() @@ -237,7 +246,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): """ try: self.pool.put(conn, block=False) - return # Everything is dandy, done. + return # Everything is dandy, done. except AttributeError: # self.pool is None. pass @@ -251,6 +260,12 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): if conn: conn.close() + def _validate_conn(self, conn): + """ + Called right before a request is made, after the socket is created. + """ + pass + def _get_timeout(self, timeout): """ Helper that always returns a :class:`urllib3.util.Timeout` """ if timeout is _Default: @@ -263,6 +278,23 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): # can be removed later return Timeout.from_float(timeout) + def _raise_timeout(self, err, url, timeout_value): + """Is the error actually a timeout? Will raise a ReadTimeout or pass""" + + if isinstance(err, SocketTimeout): + raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) + + # See the above comment about EAGAIN in Python 3. In Python 2 we have + # to specifically catch it and throw the timeout error + if hasattr(err, 'errno') and err.errno in _blocking_errnos: + raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) + + # Catch possible read timeouts thrown as SSL errors. If not the + # case, rethrow the original. We need to do this because of: + # http://bugs.python.org/issue10272 + if 'timed out' in str(err) or 'did not complete (read)' in str(err): # Python 2.6 + raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) + def _make_request(self, conn, method, url, timeout=_Default, **httplib_request_kw): """ @@ -282,23 +314,26 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): self.num_requests += 1 timeout_obj = self._get_timeout(timeout) + timeout_obj.start_connect() + conn.timeout = timeout_obj.connect_timeout + # Trigger any extra validation we need to do. try: - timeout_obj.start_connect() - conn.timeout = timeout_obj.connect_timeout - # conn.request() calls httplib.*.request, not the method in - # urllib3.request. It also calls makefile (recv) on the socket. - conn.request(method, url, **httplib_request_kw) - except SocketTimeout: - raise ConnectTimeoutError( - self, "Connection to %s timed out. (connect timeout=%s)" % - (self.host, timeout_obj.connect_timeout)) + self._validate_conn(conn) + except (SocketTimeout, BaseSSLError) as e: + # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout. + self._raise_timeout(err=e, url=url, timeout_value=conn.timeout) + raise + + # conn.request() calls httplib.*.request, not the method in + # urllib3.request. It also calls makefile (recv) on the socket. 
+ conn.request(method, url, **httplib_request_kw) # Reset the timeout for the recv() on the socket read_timeout = timeout_obj.read_timeout # App Engine doesn't have a sock attr - if hasattr(conn, 'sock'): + if getattr(conn, 'sock', None): # In Python 3 socket.py will catch EAGAIN and return None when you # try and read into the file pointer created by http.client, which # instead raises a BadStatusLine exception. Instead of catching @@ -306,41 +341,20 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): # timeouts, check for a zero timeout before making the request. if read_timeout == 0: raise ReadTimeoutError( - self, url, - "Read timed out. (read timeout=%s)" % read_timeout) + self, url, "Read timed out. (read timeout=%s)" % read_timeout) if read_timeout is Timeout.DEFAULT_TIMEOUT: conn.sock.settimeout(socket.getdefaulttimeout()) - else: # None or a value + else: # None or a value conn.sock.settimeout(read_timeout) # Receive the response from the server try: - try: # Python 2.7+, use buffering of HTTP responses + try: # Python 2.7+, use buffering of HTTP responses httplib_response = conn.getresponse(buffering=True) - except TypeError: # Python 2.6 and older + except TypeError: # Python 2.6 and older httplib_response = conn.getresponse() - except SocketTimeout: - raise ReadTimeoutError( - self, url, "Read timed out. (read timeout=%s)" % read_timeout) - - except BaseSSLError as e: - # Catch possible read timeouts thrown as SSL errors. If not the - # case, rethrow the original. We need to do this because of: - # http://bugs.python.org/issue10272 - if 'timed out' in str(e) or \ - 'did not complete (read)' in str(e): # Python 2.6 - raise ReadTimeoutError(self, url, "Read timed out.") - - raise - - except SocketError as e: # Platform-specific: Python 2 - # See the above comment about EAGAIN in Python 3. In Python 2 we - # have to specifically catch it and throw the timeout error - if e.errno in _blocking_errnos: - raise ReadTimeoutError( - self, url, - "Read timed out. (read timeout=%s)" % read_timeout) - + except (SocketTimeout, BaseSSLError, SocketError) as e: + self._raise_timeout(err=e, url=url, timeout_value=read_timeout) raise # AppEngine doesn't have a version attr. @@ -364,7 +378,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): conn.close() except Empty: - pass # Done. + pass # Done. def is_same_host(self, url): """ @@ -385,7 +399,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): return (scheme, host, port) == (self.scheme, self.host, self.port) - def urlopen(self, method, url, body=None, headers=None, retries=3, + def urlopen(self, method, url, body=None, headers=None, retries=None, redirect=True, assert_same_host=True, timeout=_Default, pool_timeout=None, release_conn=None, **response_kw): """ @@ -419,9 +433,20 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): these headers completely replace any pool-specific headers. :param retries: - Number of retries to allow before raising a MaxRetryError exception. - If `False`, then retries are disabled and any exception is raised - immediately. + Configure the number of retries to allow before raising a + :class:`~urllib3.exceptions.MaxRetryError` exception. + + Pass ``None`` to retry until you receive a response. Pass a + :class:`~urllib3.util.retry.Retry` object for fine-grained control + over different types of retries. + Pass an integer number to retry connection errors that many times, + but no other types of errors. Pass zero to never retry. 
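Before the parameter description continues, a brief sketch making those accepted forms concrete (hypothetical host; ``Retry`` is the class this patch adds in ``util/retry.py``)::

    from requests.packages.urllib3 import HTTPConnectionPool
    from requests.packages.urllib3.util.retry import Retry

    pool = HTTPConnectionPool('example.com')

    # An integer budget retries connection errors only, here at most twice.
    pool.urlopen('GET', '/', retries=2)

    # A Retry object adds backoff: the first retry happens immediately,
    # later ones sleep backoff_factor * 2 ** (observed errors - 1) seconds
    # (1.0s then 2.0s here), capped at Retry.BACKOFF_MAX.
    pool.urlopen('GET', '/', retries=Retry(total=3, backoff_factor=0.5))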
+ + If ``False``, then retries are disabled and any exception is raised + immediately. Also, instead of raising a MaxRetryError on redirects, + the redirect response will be returned. + + :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int. :param redirect: If True, automatically handle redirects (status codes 301, 302, @@ -460,15 +485,15 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): if headers is None: headers = self.headers - if retries < 0 and retries is not False: - raise MaxRetryError(self, url) + if not isinstance(retries, Retry): + retries = Retry.from_int(retries, redirect=redirect, default=self.retries) if release_conn is None: release_conn = response_kw.get('preload_content', True) # Check host if assert_same_host and not self.is_same_host(url): - raise HostChangedError(self, url, retries - 1) + raise HostChangedError(self, url, retries) conn = None @@ -484,10 +509,10 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): err = None try: - # Request a connection from the queue + # Request a connection from the queue. conn = self._get_conn(timeout=pool_timeout) - # Make the request on the httplib connection object + # Make the request on the httplib connection object. httplib_response = self._make_request(conn, method, url, timeout=timeout, body=body, headers=headers) @@ -514,33 +539,30 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): raise EmptyPoolError(self, "No pool connections are available.") except (BaseSSLError, CertificateError) as e: - # Release connection unconditionally because there is no way to - # close it externally in case of exception. - release_conn = True + # Close the connection. If a connection is reused on which there + # was a Certificate error, the next request will certainly raise + # another Certificate error. + if conn: + conn.close() + conn = None raise SSLError(e) - except (TimeoutError, HTTPException, SocketError) as e: + except (TimeoutError, HTTPException, SocketError, ConnectionError) as e: if conn: # Discard the connection for these exceptions. It will be # replaced during the next _get_conn() call. conn.close() conn = None - if not retries: - if isinstance(e, TimeoutError): - # TimeoutError is exempt from MaxRetryError-wrapping. - # FIXME: ... Not sure why. Add a reason here. - raise + stacktrace = sys.exc_info()[2] + if isinstance(e, SocketError) and self.proxy: + e = ProxyError('Cannot connect to proxy.', e) + elif isinstance(e, (SocketError, HTTPException)): + e = ProtocolError('Connection aborted.', e) - # Wrap unexpected exceptions with the most appropriate - # module-level exception and re-raise. - if isinstance(e, SocketError) and self.proxy: - raise ProxyError('Cannot connect to proxy.', e) - - if retries is False: - raise ConnectionError('Connection failed.', e) - - raise MaxRetryError(self, url, e) + retries = retries.increment(method, url, error=e, + _pool=self, _stacktrace=stacktrace) + retries.sleep() # Keep track of the error for the retry warning. err = e @@ -554,23 +576,43 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): if not conn: # Try again - log.warning("Retrying (%d attempts remain) after connection " + log.warning("Retrying (%r) after connection " "broken by '%r': %s" % (retries, err, url)) - return self.urlopen(method, url, body, headers, retries - 1, + return self.urlopen(method, url, body, headers, retries, redirect, assert_same_host, timeout=timeout, pool_timeout=pool_timeout, release_conn=release_conn, **response_kw) # Handle redirect?
redirect_location = redirect and response.get_redirect_location() - if redirect_location and retries is not False: + if redirect_location: if response.status == 303: method = 'GET' + + try: + retries = retries.increment(method, url, response=response, _pool=self) + except MaxRetryError: + if retries.raise_on_redirect: + raise + return response + log.info("Redirecting %s -> %s" % (url, redirect_location)) return self.urlopen(method, redirect_location, body, headers, - retries - 1, redirect, assert_same_host, - timeout=timeout, pool_timeout=pool_timeout, - release_conn=release_conn, **response_kw) + retries=retries, redirect=redirect, + assert_same_host=assert_same_host, + timeout=timeout, pool_timeout=pool_timeout, + release_conn=release_conn, **response_kw) + + # Check if we should retry the HTTP response. + if retries.is_forced_retry(method, status_code=response.status): + retries = retries.increment(method, url, response=response, _pool=self) + retries.sleep() + log.info("Forced retry: %s" % url) + return self.urlopen(method, url, body, headers, + retries=retries, redirect=redirect, + assert_same_host=assert_same_host, + timeout=timeout, pool_timeout=pool_timeout, + release_conn=release_conn, **response_kw) return response @@ -597,19 +639,17 @@ class HTTPSConnectionPool(HTTPConnectionPool): ConnectionCls = HTTPSConnection def __init__(self, host, port=None, - strict=False, timeout=None, maxsize=1, - block=False, headers=None, + strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, + block=False, headers=None, retries=None, _proxy=None, _proxy_headers=None, key_file=None, cert_file=None, cert_reqs=None, ca_certs=None, ssl_version=None, assert_hostname=None, assert_fingerprint=None, **conn_kw): - if sys.version_info < (2, 7): # Python 2.6 or older - conn_kw.pop('source_address', None) - HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize, - block, headers, _proxy, _proxy_headers, **conn_kw) + block, headers, retries, _proxy, _proxy_headers, + **conn_kw) self.key_file = key_file self.cert_file = cert_file self.cert_reqs = cert_reqs @@ -617,7 +657,6 @@ class HTTPSConnectionPool(HTTPConnectionPool): self.ssl_version = ssl_version self.assert_hostname = assert_hostname self.assert_fingerprint = assert_fingerprint - self.conn_kw = conn_kw def _prepare_conn(self, conn): """ @@ -633,7 +672,6 @@ class HTTPSConnectionPool(HTTPConnectionPool): assert_hostname=self.assert_hostname, assert_fingerprint=self.assert_fingerprint) conn.ssl_version = self.ssl_version - conn.conn_kw = self.conn_kw if self.proxy is not None: # Python 2.7+ @@ -641,7 +679,12 @@ class HTTPSConnectionPool(HTTPConnectionPool): set_tunnel = conn.set_tunnel except AttributeError: # Platform-specific: Python 2.6 set_tunnel = conn._set_tunnel - set_tunnel(self.host, self.port, self.proxy_headers) + + if sys.version_info <= (2, 6, 4) and not self.proxy_headers: # Python 2.6.4 and older + set_tunnel(self.host, self.port) + else: + set_tunnel(self.host, self.port, self.proxy_headers) + # Establish tunnel connection early, because otherwise httplib # would improperly set Host: header to proxy's IP:port. 
conn.connect() @@ -667,21 +710,29 @@ class HTTPSConnectionPool(HTTPConnectionPool): actual_host = self.proxy.host actual_port = self.proxy.port - extra_params = {} - if not six.PY3: # Python 2 - extra_params['strict'] = self.strict - extra_params.update(self.conn_kw) - conn = self.ConnectionCls(host=actual_host, port=actual_port, timeout=self.timeout.connect_timeout, - **extra_params) - if self.proxy is not None: - # Enable Nagle's algorithm for proxies, to avoid packet - # fragmentation. - conn.tcp_nodelay = 0 + strict=self.strict, **self.conn_kw) return self._prepare_conn(conn) + def _validate_conn(self, conn): + """ + Called right before a request is made, after the socket is created. + """ + super(HTTPSConnectionPool, self)._validate_conn(conn) + + # Force connect early to allow us to validate the connection. + if not getattr(conn, 'sock', None): # AppEngine might not have `.sock` + conn.connect() + + if not conn.is_verified: + warnings.warn(( + 'Unverified HTTPS request is being made. ' + 'Adding certificate verification is strongly advised. See: ' + 'https://urllib3.readthedocs.org/en/latest/security.html'), + InsecureRequestWarning) + def connection_from_url(url, **kw): """ @@ -698,7 +749,7 @@ def connection_from_url(url, **kw): :class:`.ConnectionPool`. Useful for specifying things like timeout, maxsize, headers, etc. - Example: :: + Example:: >>> conn = connection_from_url('http://google.com/') >>> r = conn.request('GET', '/') diff --git a/awx/lib/site-packages/requests/packages/urllib3/contrib/ntlmpool.py b/awx/lib/site-packages/requests/packages/urllib3/contrib/ntlmpool.py index b8cd933034..c6b266f5d1 100644 --- a/awx/lib/site-packages/requests/packages/urllib3/contrib/ntlmpool.py +++ b/awx/lib/site-packages/requests/packages/urllib3/contrib/ntlmpool.py @@ -1,9 +1,3 @@ -# urllib3/contrib/ntlmpool.py -# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -# -# This module is part of urllib3 and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - """ NTLM authenticating pool, contributed by erikcederstran diff --git a/awx/lib/site-packages/requests/packages/urllib3/contrib/pyopenssl.py b/awx/lib/site-packages/requests/packages/urllib3/contrib/pyopenssl.py index 21a12c68ad..8229090cb6 100644 --- a/awx/lib/site-packages/requests/packages/urllib3/contrib/pyopenssl.py +++ b/awx/lib/site-packages/requests/packages/urllib3/contrib/pyopenssl.py @@ -29,7 +29,7 @@ Now you can use :mod:`urllib3` as you normally would, and it will support SNI when the required modules are installed. Activating this module also has the positive side effect of disabling SSL/TLS -encryption in Python 2 (see `CRIME attack`_). +compression in Python 2 (see `CRIME attack`_). If you want to configure the default list of supported cipher suites, you can set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable. @@ -46,15 +46,18 @@ Module Variables ''' -from ndg.httpsclient.ssl_peer_verification import SUBJ_ALT_NAME_SUPPORT -from ndg.httpsclient.subj_alt_name import SubjectAltName as BaseSubjectAltName +try: + from ndg.httpsclient.ssl_peer_verification import SUBJ_ALT_NAME_SUPPORT + from ndg.httpsclient.subj_alt_name import SubjectAltName as BaseSubjectAltName +except SyntaxError as e: + raise ImportError(e) + import OpenSSL.SSL from pyasn1.codec.der import decoder as der_decoder from pyasn1.type import univ, constraint from socket import _fileobject, timeout import ssl import select -from cStringIO import StringIO from .. 
import connection from .. import util @@ -67,9 +70,14 @@ HAS_SNI = SUBJ_ALT_NAME_SUPPORT # Map from urllib3 to PyOpenSSL compatible parameter-values. _openssl_versions = { ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD, - ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD, ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD, } + +try: + _openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD}) +except AttributeError: + pass + _openssl_verify = { ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE, ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER, @@ -155,205 +163,68 @@ def get_subj_alt_name(peer_cert): return dns_name -class fileobject(_fileobject): - - def _wait_for_sock(self): - rd, wd, ed = select.select([self._sock], [], [], - self._sock.gettimeout()) - if not rd: - raise timeout() - - - def read(self, size=-1): - # Use max, disallow tiny reads in a loop as they are very inefficient. - # We never leave read() with any leftover data from a new recv() call - # in our internal buffer. - rbufsize = max(self._rbufsize, self.default_bufsize) - # Our use of StringIO rather than lists of string objects returned by - # recv() minimizes memory usage and fragmentation that occurs when - # rbufsize is large compared to the typical return value of recv(). - buf = self._rbuf - buf.seek(0, 2) # seek end - if size < 0: - # Read until EOF - self._rbuf = StringIO() # reset _rbuf. we consume it via buf. - while True: - try: - data = self._sock.recv(rbufsize) - except OpenSSL.SSL.WantReadError: - self._wait_for_sock() - continue - if not data: - break - buf.write(data) - return buf.getvalue() - else: - # Read until size bytes or EOF seen, whichever comes first - buf_len = buf.tell() - if buf_len >= size: - # Already have size bytes in our buffer? Extract and return. - buf.seek(0) - rv = buf.read(size) - self._rbuf = StringIO() - self._rbuf.write(buf.read()) - return rv - - self._rbuf = StringIO() # reset _rbuf. we consume it via buf. - while True: - left = size - buf_len - # recv() will malloc the amount of memory given as its - # parameter even though it often returns much less data - # than that. The returned data string is short lived - # as we copy it into a StringIO and free it. This avoids - # fragmentation issues on many platforms. - try: - data = self._sock.recv(left) - except OpenSSL.SSL.WantReadError: - self._wait_for_sock() - continue - if not data: - break - n = len(data) - if n == size and not buf_len: - # Shortcut. Avoid buffer data copies when: - # - We have no data in our buffer. - # AND - # - Our call to recv returned exactly the - # number of bytes we were asked to read. - return data - if n == left: - buf.write(data) - del data # explicit free - break - assert n <= left, "recv(%d) returned %d bytes" % (left, n) - buf.write(data) - buf_len += n - del data # explicit free - #assert buf_len == buf.tell() - return buf.getvalue() - - def readline(self, size=-1): - buf = self._rbuf - buf.seek(0, 2) # seek end - if buf.tell() > 0: - # check if we already have it in our buffer - buf.seek(0) - bline = buf.readline(size) - if bline.endswith('\n') or len(bline) == size: - self._rbuf = StringIO() - self._rbuf.write(buf.read()) - return bline - del bline - if size < 0: - # Read until \n or EOF, whichever comes first - if self._rbufsize <= 1: - # Speed up unbuffered case - buf.seek(0) - buffers = [buf.read()] - self._rbuf = StringIO() # reset _rbuf. we consume it via buf. 
- data = None - recv = self._sock.recv - while True: - try: - while data != "\n": - data = recv(1) - if not data: - break - buffers.append(data) - except OpenSSL.SSL.WantReadError: - self._wait_for_sock() - continue - break - return "".join(buffers) - - buf.seek(0, 2) # seek end - self._rbuf = StringIO() # reset _rbuf. we consume it via buf. - while True: - try: - data = self._sock.recv(self._rbufsize) - except OpenSSL.SSL.WantReadError: - self._wait_for_sock() - continue - if not data: - break - nl = data.find('\n') - if nl >= 0: - nl += 1 - buf.write(data[:nl]) - self._rbuf.write(data[nl:]) - del data - break - buf.write(data) - return buf.getvalue() - else: - # Read until size bytes or \n or EOF seen, whichever comes first - buf.seek(0, 2) # seek end - buf_len = buf.tell() - if buf_len >= size: - buf.seek(0) - rv = buf.read(size) - self._rbuf = StringIO() - self._rbuf.write(buf.read()) - return rv - self._rbuf = StringIO() # reset _rbuf. we consume it via buf. - while True: - try: - data = self._sock.recv(self._rbufsize) - except OpenSSL.SSL.WantReadError: - self._wait_for_sock() - continue - if not data: - break - left = size - buf_len - # did we just receive a newline? - nl = data.find('\n', 0, left) - if nl >= 0: - nl += 1 - # save the excess data to _rbuf - self._rbuf.write(data[nl:]) - if buf_len: - buf.write(data[:nl]) - break - else: - # Shortcut. Avoid data copy through buf when returning - # a substring of our first recv(). - return data[:nl] - n = len(data) - if n == size and not buf_len: - # Shortcut. Avoid data copy through buf when - # returning exactly all of our first recv(). - return data - if n >= left: - buf.write(data[:left]) - self._rbuf.write(data[left:]) - break - buf.write(data) - buf_len += n - #assert buf_len == buf.tell() - return buf.getvalue() - - class WrappedSocket(object): - '''API-compatibility wrapper for Python OpenSSL's Connection-class.''' + '''API-compatibility wrapper for Python OpenSSL's Connection-class. - def __init__(self, connection, socket): + Note: _makefile_refs, _drop() and _reuse() are needed for the garbage + collector of pypy. 
+ ''' + + def __init__(self, connection, socket, suppress_ragged_eofs=True): self.connection = connection self.socket = socket + self.suppress_ragged_eofs = suppress_ragged_eofs + self._makefile_refs = 0 def fileno(self): return self.socket.fileno() def makefile(self, mode, bufsize=-1): - return fileobject(self.connection, mode, bufsize) + self._makefile_refs += 1 + return _fileobject(self, mode, bufsize, close=True) + + def recv(self, *args, **kwargs): + try: + data = self.connection.recv(*args, **kwargs) + except OpenSSL.SSL.SysCallError as e: + if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'): + return b'' + else: + raise + except OpenSSL.SSL.WantReadError: + rd, wd, ed = select.select( + [self.socket], [], [], self.socket.gettimeout()) + if not rd: + raise timeout('The read operation timed out') + else: + return self.recv(*args, **kwargs) + else: + return data def settimeout(self, timeout): return self.socket.settimeout(timeout) + def _send_until_done(self, data): + while True: + try: + return self.connection.send(data) + except OpenSSL.SSL.WantWriteError: + _, wlist, _ = select.select([], [self.socket], [], + self.socket.gettimeout()) + if not wlist: + raise timeout() + continue + def sendall(self, data): - return self.connection.sendall(data) + while len(data): + sent = self._send_until_done(data) + data = data[sent:] def close(self): - return self.connection.shutdown() + if self._makefile_refs < 1: + return self.connection.shutdown() + else: + self._makefile_refs -= 1 def getpeercert(self, binary_form=False): x509 = self.connection.get_peer_certificate() @@ -376,6 +247,15 @@ class WrappedSocket(object): ] } + def _reuse(self): + self._makefile_refs += 1 + + def _drop(self): + if self._makefile_refs < 1: + self.close() + else: + self._makefile_refs -= 1 + def _verify_callback(cnx, x509, err_no, err_depth, return_code): return err_no == 0 @@ -386,6 +266,7 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, ssl_version=None): ctx = OpenSSL.SSL.Context(_openssl_versions[ssl_version]) if certfile: + keyfile = keyfile or certfile # Match behaviour of the normal python ssl library ctx.use_certificate_file(certfile) if keyfile: ctx.use_privatekey_file(keyfile) diff --git a/awx/lib/site-packages/requests/packages/urllib3/exceptions.py b/awx/lib/site-packages/requests/packages/urllib3/exceptions.py index b4df831fec..0c6fd3c51b 100644 --- a/awx/lib/site-packages/requests/packages/urllib3/exceptions.py +++ b/awx/lib/site-packages/requests/packages/urllib3/exceptions.py @@ -1,9 +1,3 @@ -# urllib3/exceptions.py -# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -# -# This module is part of urllib3 and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - ## Base Exceptions @@ -11,6 +5,11 @@ class HTTPError(Exception): "Base exception used by this module." pass +class HTTPWarning(Warning): + "Base warning used by this module." + pass + + class PoolError(HTTPError): "Base exception for errors caused within a pool." @@ -44,29 +43,37 @@ class ProxyError(HTTPError): pass -class ConnectionError(HTTPError): - "Raised when a normal connection fails." - pass - - class DecodeError(HTTPError): "Raised when automatic decoding based on Content-Type fails." pass +class ProtocolError(HTTPError): + "Raised when something unexpected happens mid-request/response." + pass + + +#: Renamed to ProtocolError but aliased for backwards compatibility. 
+ConnectionError = ProtocolError
+
+
 ## Leaf Exceptions

 class MaxRetryError(RequestError):
-    "Raised when the maximum number of retries is exceeded."
+    """Raised when the maximum number of retries is exceeded.
+
+    :param pool: The connection pool
+    :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
+    :param string url: The requested URL
+    :param exceptions.Exception reason: The underlying error
+
+    """

     def __init__(self, pool, url, reason=None):
         self.reason = reason

-        message = "Max retries exceeded with url: %s" % url
-        if reason:
-            message += " (Caused by %s: %s)" % (type(reason), reason)
-        else:
-            message += " (Caused by redirect)"
+        message = "Max retries exceeded with url: %s (Caused by %r)" % (
+            url, reason)

         RequestError.__init__(self, pool, url, message)

@@ -116,7 +123,12 @@ class ClosedPoolError(PoolError):
     pass


-class LocationParseError(ValueError, HTTPError):
+class LocationValueError(ValueError, HTTPError):
+    "Raised when there is something wrong with a given URL input."
+    pass
+
+
+class LocationParseError(LocationValueError):
     "Raised when get_host or similar fails to parse the URL input."

     def __init__(self, location):
@@ -124,3 +136,24 @@ class LocationParseError(ValueError, HTTPError):
         HTTPError.__init__(self, message)

         self.location = location
+
+
+class ResponseError(HTTPError):
+    "Used as a container for an error reason supplied in a MaxRetryError."
+    GENERIC_ERROR = 'too many error responses'
+    SPECIFIC_ERROR = 'too many {status_code} error responses'
+
+
+class SecurityWarning(HTTPWarning):
+    "Warned when performing security-reducing actions"
+    pass
+
+
+class InsecureRequestWarning(SecurityWarning):
+    "Warned when making an unverified HTTPS request."
+    pass
+
+
+class SystemTimeWarning(SecurityWarning):
+    "Warned when system time is suspected to be wrong"
+    pass
diff --git a/awx/lib/site-packages/requests/packages/urllib3/fields.py b/awx/lib/site-packages/requests/packages/urllib3/fields.py
index da79e929be..c853f8d56b 100644
--- a/awx/lib/site-packages/requests/packages/urllib3/fields.py
+++ b/awx/lib/site-packages/requests/packages/urllib3/fields.py
@@ -1,9 +1,3 @@
-# urllib3/fields.py
-# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
-#
-# This module is part of urllib3 and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
 import email.utils
 import mimetypes

@@ -78,9 +72,10 @@ class RequestField(object):
         """
         A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.

-        Supports constructing :class:`~urllib3.fields.RequestField` from parameter
-        of key/value strings AND key/filetuple. A filetuple is a (filename, data, MIME type)
-        tuple where the MIME type is optional. For example: ::
+        Supports constructing :class:`~urllib3.fields.RequestField` from
+        parameters of key/value strings AND key/filetuple. A filetuple is a
+        (filename, data, MIME type) tuple where the MIME type is optional.
+        For example::

             'foo': 'bar',
             'fakefile': ('foofile.txt', 'contents of foofile'),
@@ -125,8 +120,8 @@ class RequestField(object):
             'Content-Disposition' fields.

         :param header_parts:
-            A sequence of (k, v) typles or a :class:`dict` of (k, v) to format as
-            `k1="v1"; k2="v2"; ...`.
+            A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format
+            as `k1="v1"; k2="v2"; ...`.
""" parts = [] iterable = header_parts @@ -158,7 +153,8 @@ class RequestField(object): lines.append('\r\n') return '\r\n'.join(lines) - def make_multipart(self, content_disposition=None, content_type=None, content_location=None): + def make_multipart(self, content_disposition=None, content_type=None, + content_location=None): """ Makes this request field into a multipart request field. @@ -172,6 +168,10 @@ class RequestField(object): """ self.headers['Content-Disposition'] = content_disposition or 'form-data' - self.headers['Content-Disposition'] += '; '.join(['', self._render_parts((('name', self._name), ('filename', self._filename)))]) + self.headers['Content-Disposition'] += '; '.join([ + '', self._render_parts( + (('name', self._name), ('filename', self._filename)) + ) + ]) self.headers['Content-Type'] = content_type self.headers['Content-Location'] = content_location diff --git a/awx/lib/site-packages/requests/packages/urllib3/filepost.py b/awx/lib/site-packages/requests/packages/urllib3/filepost.py index e8b30bddf2..0fbf488dfe 100644 --- a/awx/lib/site-packages/requests/packages/urllib3/filepost.py +++ b/awx/lib/site-packages/requests/packages/urllib3/filepost.py @@ -1,11 +1,4 @@ -# urllib3/filepost.py -# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -# -# This module is part of urllib3 and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - import codecs -import mimetypes from uuid import uuid4 from io import BytesIO @@ -38,10 +31,10 @@ def iter_field_objects(fields): i = iter(fields) for field in i: - if isinstance(field, RequestField): - yield field - else: - yield RequestField.from_tuples(*field) + if isinstance(field, RequestField): + yield field + else: + yield RequestField.from_tuples(*field) def iter_fields(fields): diff --git a/awx/lib/site-packages/requests/packages/urllib3/packages/ordered_dict.py b/awx/lib/site-packages/requests/packages/urllib3/packages/ordered_dict.py index 7f8ee15436..4479363cc4 100644 --- a/awx/lib/site-packages/requests/packages/urllib3/packages/ordered_dict.py +++ b/awx/lib/site-packages/requests/packages/urllib3/packages/ordered_dict.py @@ -2,7 +2,6 @@ # Passes Python2.7's test suite and incorporates all the latest updates. # Copyright 2009 Raymond Hettinger, released under the MIT License. 
# http://code.activestate.com/recipes/576693/ - try: from thread import get_ident as _get_ident except ImportError: diff --git a/awx/lib/site-packages/requests/packages/urllib3/poolmanager.py b/awx/lib/site-packages/requests/packages/urllib3/poolmanager.py index f18ff2bb7e..515dc96219 100644 --- a/awx/lib/site-packages/requests/packages/urllib3/poolmanager.py +++ b/awx/lib/site-packages/requests/packages/urllib3/poolmanager.py @@ -1,9 +1,3 @@ -# urllib3/poolmanager.py -# Copyright 2008-2014 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -# -# This module is part of urllib3 and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - import logging try: # Python 3 @@ -14,8 +8,10 @@ except ImportError: from ._collections import RecentlyUsedContainer from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool from .connectionpool import port_by_scheme +from .exceptions import LocationValueError from .request import RequestMethods -from .util import parse_url +from .util.url import parse_url +from .util.retry import Retry __all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url'] @@ -49,7 +45,7 @@ class PoolManager(RequestMethods): Additional parameters are used to create fresh :class:`urllib3.connectionpool.ConnectionPool` instances. - Example: :: + Example:: >>> manager = PoolManager(num_pools=2) >>> r = manager.request('GET', 'http://google.com/') @@ -102,10 +98,11 @@ class PoolManager(RequestMethods): ``urllib3.connectionpool.port_by_scheme``. """ + if not host: + raise LocationValueError("No host specified.") + scheme = scheme or 'http' - port = port or port_by_scheme.get(scheme, 80) - pool_key = (scheme, host, port) with self.pools.lock: @@ -118,6 +115,7 @@ class PoolManager(RequestMethods): # Make a fresh ConnectionPool of the desired type pool = self._new_pool(scheme, host, port) self.pools[pool_key] = pool + return pool def connection_from_url(self, url): @@ -161,13 +159,18 @@ class PoolManager(RequestMethods): # Support relative URLs for redirecting. redirect_location = urljoin(url, redirect_location) - # RFC 2616, Section 10.3.4 + # RFC 7231, Section 6.4.4 if response.status == 303: method = 'GET' - log.info("Redirecting %s -> %s" % (url, redirect_location)) - kw['retries'] = kw.get('retries', 3) - 1 # Persist retries countdown + retries = kw.get('retries') + if not isinstance(retries, Retry): + retries = Retry.from_int(retries, redirect=redirect) + + kw['retries'] = retries.increment(method, redirect_location) kw['redirect'] = redirect + + log.info("Redirecting %s -> %s" % (url, redirect_location)) return self.urlopen(method, redirect_location, **kw) @@ -208,12 +211,16 @@ class ProxyManager(PoolManager): if not proxy.port: port = port_by_scheme.get(proxy.scheme, 80) proxy = proxy._replace(port=port) + + assert proxy.scheme in ("http", "https"), \ + 'Not supported proxy scheme %s' % proxy.scheme + self.proxy = proxy self.proxy_headers = proxy_headers or {} - assert self.proxy.scheme in ("http", "https"), \ - 'Not supported proxy scheme %s' % self.proxy.scheme + connection_pool_kw['_proxy'] = self.proxy connection_pool_kw['_proxy_headers'] = self.proxy_headers + super(ProxyManager, self).__init__( num_pools, headers, **connection_pool_kw) @@ -248,10 +255,10 @@ class ProxyManager(PoolManager): # For proxied HTTPS requests, httplib sets the necessary headers # on the CONNECT to the proxy. For HTTP, we'll definitely # need to set 'Host' at the very least. 
- kw['headers'] = self._set_proxy_headers(url, kw.get('headers', - self.headers)) + headers = kw.get('headers', self.headers) + kw['headers'] = self._set_proxy_headers(url, headers) - return super(ProxyManager, self).urlopen(method, url, redirect, **kw) + return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw) def proxy_from_url(url, **kw): diff --git a/awx/lib/site-packages/requests/packages/urllib3/request.py b/awx/lib/site-packages/requests/packages/urllib3/request.py index 2a92cc2083..b08d6c9274 100644 --- a/awx/lib/site-packages/requests/packages/urllib3/request.py +++ b/awx/lib/site-packages/requests/packages/urllib3/request.py @@ -1,9 +1,3 @@ -# urllib3/request.py -# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -# -# This module is part of urllib3 and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - try: from urllib.parse import urlencode except ImportError: @@ -26,8 +20,8 @@ class RequestMethods(object): Specifically, - :meth:`.request_encode_url` is for sending requests whose fields are encoded - in the URL (such as GET, HEAD, DELETE). + :meth:`.request_encode_url` is for sending requests whose fields are + encoded in the URL (such as GET, HEAD, DELETE). :meth:`.request_encode_body` is for sending requests whose fields are encoded in the *body* of the request using multipart or www-form-urlencoded @@ -51,7 +45,7 @@ class RequestMethods(object): def urlopen(self, method, url, body=None, headers=None, encode_multipart=True, multipart_boundary=None, - **kw): # Abstract + **kw): # Abstract raise NotImplemented("Classes extending RequestMethods must implement " "their own ``urlopen`` method.") @@ -61,8 +55,8 @@ class RequestMethods(object): ``fields`` based on the ``method`` used. This is a convenience method that requires the least amount of manual - effort. It can be used in most situations, while still having the option - to drop down to more specific methods when necessary, such as + effort. It can be used in most situations, while still having the + option to drop down to more specific methods when necessary, such as :meth:`request_encode_url`, :meth:`request_encode_body`, or even the lowest level :meth:`urlopen`. """ @@ -70,12 +64,12 @@ class RequestMethods(object): if method in self._encode_url_methods: return self.request_encode_url(method, url, fields=fields, - headers=headers, - **urlopen_kw) + headers=headers, + **urlopen_kw) else: return self.request_encode_body(method, url, fields=fields, - headers=headers, - **urlopen_kw) + headers=headers, + **urlopen_kw) def request_encode_url(self, method, url, fields=None, **urlopen_kw): """ @@ -94,18 +88,18 @@ class RequestMethods(object): the body. This is useful for request methods like POST, PUT, PATCH, etc. When ``encode_multipart=True`` (default), then - :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode the - payload with the appropriate content type. Otherwise + :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode + the payload with the appropriate content type. Otherwise :meth:`urllib.urlencode` is used with the 'application/x-www-form-urlencoded' content type. Multipart encoding must be used when posting files, and it's reasonably - safe to use it in other times too. However, it may break request signing, - such as with OAuth. + safe to use it in other times too. However, it may break request + signing, such as with OAuth. 
        Supports an optional ``fields`` parameter of key/value strings AND
        key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
-        the MIME type is optional. For example: ::
+        the MIME type is optional. For example::

            fields = {
                'foo': 'bar',
@@ -119,23 +113,29 @@ class RequestMethods(object):
        When uploading a file, providing a filename (the first parameter of the
        tuple) is optional but recommended to best mimic behavior of browsers.

-        Note that if ``headers`` are supplied, the 'Content-Type' header will be
-        overwritten because it depends on the dynamic random boundary string
+        Note that if ``headers`` are supplied, the 'Content-Type' header will
+        be overwritten because it depends on the dynamic random boundary string
        which is used to compose the body of the request. The random boundary
        string can be explicitly set with the ``multipart_boundary`` parameter.
        """
-        if encode_multipart:
-            body, content_type = encode_multipart_formdata(fields or {},
-                boundary=multipart_boundary)
-        else:
-            body, content_type = (urlencode(fields or {}),
-                'application/x-www-form-urlencoded')
-
        if headers is None:
            headers = self.headers

-        headers_ = {'Content-Type': content_type}
-        headers_.update(headers)
+        extra_kw = {'headers': {}}

-        return self.urlopen(method, url, body=body, headers=headers_,
-                            **urlopen_kw)
+        if fields:
+            if 'body' in urlopen_kw:
+                raise TypeError('request got values for both \'fields\' and \'body\', can only specify one.')
+
+            if encode_multipart:
+                body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary)
+            else:
+                body, content_type = urlencode(fields), 'application/x-www-form-urlencoded'
+
+            extra_kw['body'] = body
+            extra_kw['headers'] = {'Content-Type': content_type}
+
+        extra_kw['headers'].update(headers)
+        extra_kw.update(urlopen_kw)
+
+        return self.urlopen(method, url, **extra_kw)
diff --git a/awx/lib/site-packages/requests/packages/urllib3/response.py b/awx/lib/site-packages/requests/packages/urllib3/response.py
index db441828aa..e69de95733 100644
--- a/awx/lib/site-packages/requests/packages/urllib3/response.py
+++ b/awx/lib/site-packages/requests/packages/urllib3/response.py
@@ -1,22 +1,14 @@
-# urllib3/response.py
-# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
-#
-# This module is part of urllib3 and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-
-import logging
 import zlib
 import io
+from socket import timeout as SocketTimeout

 from ._collections import HTTPHeaderDict
-from .exceptions import DecodeError
+from .exceptions import ProtocolError, DecodeError, ReadTimeoutError
 from .packages.six import string_types as basestring, binary_type
-from .util import is_fp_closed
+from .connection import HTTPException, BaseSSLError
+from .util.response import is_fp_closed

-log = logging.getLogger(__name__)
-

 class DeflateDecoder(object):

@@ -56,7 +48,10 @@ class HTTPResponse(io.IOBase):
     HTTP Response container.

     Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
-    loaded and decoded on-demand when the ``data`` property is accessed.
+    loaded and decoded on-demand when the ``data`` property is accessed. This
+    class is also compatible with the Python standard library's :mod:`io`
+    module, and can hence be treated as a readable object in the context of that
+    framework.
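The :mod:`io` compatibility promised here is delivered by the ``readable()`` and ``readinto()`` methods added at the bottom of this file's diff, which lets the standard buffered machinery wrap a streamed response. A minimal sketch, assuming this patched vendored copy and a reachable host::

    import io
    from requests.packages.urllib3 import PoolManager

    http = PoolManager()
    resp = http.request('GET', 'http://example.com/', preload_content=False)

    # BufferedReader fills its buffer through HTTPResponse.readinto().
    reader = io.BufferedReader(resp, 8 * 1024)
    first_line = reader.readline()
    resp.release_conn()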
Extra parameters for behaviour not present in httplib.HTTPResponse: @@ -91,11 +86,14 @@ class HTTPResponse(io.IOBase): self.decode_content = decode_content self._decoder = None - self._body = body if body and isinstance(body, basestring) else None + self._body = None self._fp = None self._original_response = original_response self._fp_bytes_read = 0 + if body and isinstance(body, (basestring, binary_type)): + self._body = body + self._pool = pool self._connection = connection @@ -163,8 +161,8 @@ class HTTPResponse(io.IOBase): after having ``.read()`` the file object. (Overridden if ``amt`` is set.) """ - # Note: content-encoding value should be case-insensitive, per RFC 2616 - # Section 3.5 + # Note: content-encoding value should be case-insensitive, per RFC 7230 + # Section 3.2 content_encoding = self.headers.get('content-encoding', '').lower() if self._decoder is None: if content_encoding in self.CONTENT_DECODERS: @@ -178,23 +176,42 @@ class HTTPResponse(io.IOBase): flush_decoder = False try: - if amt is None: - # cStringIO doesn't like amt=None - data = self._fp.read() - flush_decoder = True - else: - cache_content = False - data = self._fp.read(amt) - if amt != 0 and not data: # Platform-specific: Buggy versions of Python. - # Close the connection when no data is returned - # - # This is redundant to what httplib/http.client _should_ - # already do. However, versions of python released before - # December 15, 2012 (http://bugs.python.org/issue16298) do not - # properly close the connection in all cases. There is no harm - # in redundantly calling close. - self._fp.close() + try: + if amt is None: + # cStringIO doesn't like amt=None + data = self._fp.read() flush_decoder = True + else: + cache_content = False + data = self._fp.read(amt) + if amt != 0 and not data: # Platform-specific: Buggy versions of Python. + # Close the connection when no data is returned + # + # This is redundant to what httplib/http.client _should_ + # already do. However, versions of python released before + # December 15, 2012 (http://bugs.python.org/issue16298) do + # not properly close the connection in all cases. There is + # no harm in redundantly calling close. + self._fp.close() + flush_decoder = True + + except SocketTimeout: + # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but + # there is yet no clean way to get at it from this context. + raise ReadTimeoutError(self._pool, None, 'Read timed out.') + + except BaseSSLError as e: + # FIXME: Is there a better way to differentiate between SSLErrors? + if not 'read operation timed out' in str(e): # Defensive: + # This shouldn't happen but just in case we're missing an edge + # case, let's avoid swallowing SSL errors. + raise + + raise ReadTimeoutError(self._pool, None, 'Read timed out.') + + except HTTPException as e: + # This includes IncompleteRead. + raise ProtocolError('Connection broken: %r' % e, e) self._fp_bytes_read += len(data) @@ -204,8 +221,7 @@ class HTTPResponse(io.IOBase): except (IOError, zlib.error) as e: raise DecodeError( "Received response with content-encoding: %s, but " - "failed to decode it." % content_encoding, - e) + "failed to decode it." 
% content_encoding, e) if flush_decoder and decode_content and self._decoder: buf = self._decoder.decompress(binary_type()) @@ -242,7 +258,6 @@ class HTTPResponse(io.IOBase): if data: yield data - @classmethod def from_httplib(ResponseCls, r, **response_kw): """ @@ -297,7 +312,7 @@ class HTTPResponse(io.IOBase): elif hasattr(self._fp, "fileno"): return self._fp.fileno() else: - raise IOError("The file-like object this HTTPResponse is wrapped " + raise IOError("The file-like object this HTTPResponse is wrapped " "around has no file descriptor") def flush(self): @@ -305,4 +320,14 @@ class HTTPResponse(io.IOBase): return self._fp.flush() def readable(self): + # This method is required for `io` module compatibility. return True + + def readinto(self, b): + # This method is required for `io` module compatibility. + temp = self.read(len(b)) + if len(temp) == 0: + return 0 + else: + b[:len(temp)] = temp + return len(temp) diff --git a/awx/lib/site-packages/requests/packages/urllib3/util/__init__.py b/awx/lib/site-packages/requests/packages/urllib3/util/__init__.py index a40185eeaf..8becc81433 100644 --- a/awx/lib/site-packages/requests/packages/urllib3/util/__init__.py +++ b/awx/lib/site-packages/requests/packages/urllib3/util/__init__.py @@ -1,9 +1,4 @@ -# urllib3/util/__init__.py -# Copyright 2008-2014 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -# -# This module is part of urllib3 and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - +# For backwards compatibility, provide imports that used to be here. from .connection import is_connection_dropped from .request import make_headers from .response import is_fp_closed @@ -19,6 +14,8 @@ from .timeout import ( current_time, Timeout, ) + +from .retry import Retry from .url import ( get_host, parse_url, diff --git a/awx/lib/site-packages/requests/packages/urllib3/util/connection.py b/awx/lib/site-packages/requests/packages/urllib3/util/connection.py index 8deeab5cc0..2156993a0c 100644 --- a/awx/lib/site-packages/requests/packages/urllib3/util/connection.py +++ b/awx/lib/site-packages/requests/packages/urllib3/util/connection.py @@ -1,4 +1,4 @@ -from socket import error as SocketError +import socket try: from select import poll, POLLIN except ImportError: # `poll` doesn't exist on OSX and other platforms @@ -8,6 +8,7 @@ except ImportError: # `poll` doesn't exist on OSX and other platforms except ImportError: # `select` doesn't exist on AppEngine. select = False + def is_connection_dropped(conn): # Platform-specific """ Returns True if the connection is dropped and should be closed. @@ -22,7 +23,7 @@ def is_connection_dropped(conn): # Platform-specific if sock is False: # Platform-specific: AppEngine return False if sock is None: # Connection already closed (such as by httplib). - return False + return True if not poll: if not select: # Platform-specific: AppEngine @@ -30,7 +31,7 @@ def is_connection_dropped(conn): # Platform-specific try: return select([sock], [], [], 0.0)[0] - except SocketError: + except socket.error: return True # This version is better on platforms that support it. @@ -42,4 +43,55 @@ def is_connection_dropped(conn): # Platform-specific return True +# This function is copied from socket.py in the Python 2.7 standard +# library test suite. Added to its signature is only `socket_options`. +def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, + source_address=None, socket_options=None): + """Connect to *address* and return the socket object. + Convenience function. 
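The new ``socket_options`` parameter in the signature just above is the single urllib3 addition to this stdlib-derived helper; pools forward it from ``conn_kw`` (see the proxy-related ``setdefault`` in the connectionpool.py hunks earlier). A hedged sketch of passing options end to end, against a hypothetical host::

    import socket
    from requests.packages.urllib3 import HTTPConnectionPool

    # Each (level, optname, value) triple is applied with sock.setsockopt()
    # before connect(), by _set_socket_options() below.
    pool = HTTPConnectionPool('example.com', socket_options=[
        (socket.IPPROTO_TCP, socket.TCP_NODELAY, 1),
    ])
    resp = pool.urlopen('GET', '/')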
Connect to *address* (a 2-tuple ``(host, + port)``) and return the socket object. Passing the optional + *timeout* parameter will set the timeout on the socket instance + before attempting to connect. If no *timeout* is supplied, the + global default timeout setting returned by :func:`getdefaulttimeout` + is used. If *source_address* is set it must be a tuple of (host, port) + for the socket to bind as a source address before making the connection. + An host of '' or port 0 tells the OS to use the default. + """ + + host, port = address + err = None + for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM): + af, socktype, proto, canonname, sa = res + sock = None + try: + sock = socket.socket(af, socktype, proto) + + # If provided, set socket level options before connecting. + # This is the only addition urllib3 makes to this function. + _set_socket_options(sock, socket_options) + + if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT: + sock.settimeout(timeout) + if source_address: + sock.bind(source_address) + sock.connect(sa) + return sock + + except socket.error as _: + err = _ + if sock is not None: + sock.close() + + if err is not None: + raise err + else: + raise socket.error("getaddrinfo returns an empty list") + + +def _set_socket_options(sock, options): + if options is None: + return + + for opt in options: + sock.setsockopt(*opt) diff --git a/awx/lib/site-packages/requests/packages/urllib3/util/request.py b/awx/lib/site-packages/requests/packages/urllib3/util/request.py index d48d6513b1..bc64f6b1fb 100644 --- a/awx/lib/site-packages/requests/packages/urllib3/util/request.py +++ b/awx/lib/site-packages/requests/packages/urllib3/util/request.py @@ -1,13 +1,12 @@ from base64 import b64encode -from ..packages import six - +from ..packages.six import b ACCEPT_ENCODING = 'gzip,deflate' def make_headers(keep_alive=None, accept_encoding=None, user_agent=None, - basic_auth=None, proxy_basic_auth=None): + basic_auth=None, proxy_basic_auth=None, disable_cache=None): """ Shortcuts for generating request headers. @@ -32,7 +31,10 @@ def make_headers(keep_alive=None, accept_encoding=None, user_agent=None, Colon-separated username:password string for 'proxy-authorization: basic ...' auth header. - Example: :: + :param disable_cache: + If ``True``, adds 'cache-control: no-cache' header. + + Example:: >>> make_headers(keep_alive=True, user_agent="Batman/1.0") {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} @@ -57,12 +59,13 @@ def make_headers(keep_alive=None, accept_encoding=None, user_agent=None, if basic_auth: headers['authorization'] = 'Basic ' + \ - b64encode(six.b(basic_auth)).decode('utf-8') + b64encode(b(basic_auth)).decode('utf-8') if proxy_basic_auth: headers['proxy-authorization'] = 'Basic ' + \ - b64encode(six.b(proxy_basic_auth)).decode('utf-8') + b64encode(b(proxy_basic_auth)).decode('utf-8') + + if disable_cache: + headers['cache-control'] = 'no-cache' return headers - - diff --git a/awx/lib/site-packages/requests/packages/urllib3/util/response.py b/awx/lib/site-packages/requests/packages/urllib3/util/response.py index d0325bc6b5..45fff55246 100644 --- a/awx/lib/site-packages/requests/packages/urllib3/util/response.py +++ b/awx/lib/site-packages/requests/packages/urllib3/util/response.py @@ -5,9 +5,18 @@ def is_fp_closed(obj): :param obj: The file-like object to check. """ - if hasattr(obj, 'fp'): - # Object is a container for another file-like object that gets released - # on exhaustion (e.g. 
HTTPResponse)
-        return obj.fp is None
-    return obj.closed
+    try:
+        # Check via the official file-like-object way.
+        return obj.closed
+    except AttributeError:
+        pass
+
+    try:
+        # Check if the object is a container for another file-like object that
+        # gets released on exhaustion (e.g. HTTPResponse).
+        return obj.fp is None
+    except AttributeError:
+        pass
+
+    raise ValueError("Unable to determine whether fp is closed.")
diff --git a/awx/lib/site-packages/requests/packages/urllib3/util/retry.py b/awx/lib/site-packages/requests/packages/urllib3/util/retry.py
new file mode 100644
index 0000000000..aeaf8a0253
--- /dev/null
+++ b/awx/lib/site-packages/requests/packages/urllib3/util/retry.py
@@ -0,0 +1,285 @@
+import time
+import logging
+
+from ..exceptions import (
+    ConnectTimeoutError,
+    MaxRetryError,
+    ProtocolError,
+    ReadTimeoutError,
+    ResponseError,
+)
+from ..packages import six
+
+
+log = logging.getLogger(__name__)
+
+
+class Retry(object):
+    """ Retry configuration.
+
+    Each retry attempt will create a new Retry object with updated values, so
+    they can be safely reused.
+
+    Retries can be defined as a default for a pool::
+
+        retries = Retry(connect=5, read=2, redirect=5)
+        http = PoolManager(retries=retries)
+        response = http.request('GET', 'http://example.com/')
+
+    Or per-request (which overrides the default for the pool)::
+
+        response = http.request('GET', 'http://example.com/', retries=Retry(10))
+
+    Retries can be disabled by passing ``False``::
+
+        response = http.request('GET', 'http://example.com/', retries=False)
+
+    Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
+    retries are disabled, in which case the causing exception will be raised.
+
+    :param int total:
+        Total number of retries to allow. Takes precedence over other counts.
+
+        Set to ``None`` to remove this constraint and fall back on other
+        counts. It's a good idea to set this to some sensibly-high value to
+        account for unexpected edge cases and avoid infinite retry loops.
+
+        Set to ``0`` to fail on the first retry.
+
+        Set to ``False`` to disable and imply ``raise_on_redirect=False``.
+
+    :param int connect:
+        How many connection-related errors to retry on.
+
+        These are errors raised before the request is sent to the remote server,
+        which we assume has not triggered the server to process the request.
+
+        Set to ``0`` to fail on the first retry of this type.
+
+    :param int read:
+        How many times to retry on read errors.
+
+        These errors are raised after the request was sent to the server, so the
+        request may have side-effects.
+
+        Set to ``0`` to fail on the first retry of this type.
+
+    :param int redirect:
+        How many redirects to perform. Limit this to avoid infinite redirect
+        loops.
+
+        A redirect is an HTTP response with a status code 301, 302, 303, 307 or
+        308.
+
+        Set to ``0`` to fail on the first retry of this type.
+
+        Set to ``False`` to disable and imply ``raise_on_redirect=False``.
+
+    :param iterable method_whitelist:
+        Set of uppercased HTTP method verbs that we should retry on.
+
+        By default, we only retry on methods which are considered to be
+        idempotent (multiple requests with the same parameters end with the
+        same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`.
+
+    :param iterable status_forcelist:
+        A set of HTTP status codes that we should force a retry on.
+
+        By default, this is disabled with ``None``.
+
+    :param float backoff_factor:
+        A backoff factor to apply between attempts.
urllib3 will sleep for:: + + {backoff factor} * (2 ^ ({number of total retries} - 1)) + + seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep + for [0.1s, 0.2s, 0.4s, ...] between retries. It will never be longer + than :attr:`Retry.MAX_BACKOFF`. + + By default, backoff is disabled (set to 0). + + :param bool raise_on_redirect: Whether, if the number of redirects is + exhausted, to raise a MaxRetryError, or to return a response with a + response code in the 3xx range. + """ + + DEFAULT_METHOD_WHITELIST = frozenset([ + 'HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS', 'TRACE']) + + #: Maximum backoff time. + BACKOFF_MAX = 120 + + def __init__(self, total=10, connect=None, read=None, redirect=None, + method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None, + backoff_factor=0, raise_on_redirect=True, _observed_errors=0): + + self.total = total + self.connect = connect + self.read = read + + if redirect is False or total is False: + redirect = 0 + raise_on_redirect = False + + self.redirect = redirect + self.status_forcelist = status_forcelist or set() + self.method_whitelist = method_whitelist + self.backoff_factor = backoff_factor + self.raise_on_redirect = raise_on_redirect + self._observed_errors = _observed_errors # TODO: use .history instead? + + def new(self, **kw): + params = dict( + total=self.total, + connect=self.connect, read=self.read, redirect=self.redirect, + method_whitelist=self.method_whitelist, + status_forcelist=self.status_forcelist, + backoff_factor=self.backoff_factor, + raise_on_redirect=self.raise_on_redirect, + _observed_errors=self._observed_errors, + ) + params.update(kw) + return type(self)(**params) + + @classmethod + def from_int(cls, retries, redirect=True, default=None): + """ Backwards-compatibility for the old retries format.""" + if retries is None: + retries = default if default is not None else cls.DEFAULT + + if isinstance(retries, Retry): + return retries + + redirect = bool(redirect) and None + new_retries = cls(retries, redirect=redirect) + log.debug("Converted retries value: %r -> %r" % (retries, new_retries)) + return new_retries + + def get_backoff_time(self): + """ Formula for computing the current backoff + + :rtype: float + """ + if self._observed_errors <= 1: + return 0 + + backoff_value = self.backoff_factor * (2 ** (self._observed_errors - 1)) + return min(self.BACKOFF_MAX, backoff_value) + + def sleep(self): + """ Sleep between retry attempts using an exponential backoff. + + By default, the backoff factor is 0 and this method will return + immediately. + """ + backoff = self.get_backoff_time() + if backoff <= 0: + return + time.sleep(backoff) + + def _is_connection_error(self, err): + """ Errors when we're fairly sure that the server did not receive the + request, so it should be safe to retry. + """ + return isinstance(err, ConnectTimeoutError) + + def _is_read_error(self, err): + """ Errors that occur after the request has been started, so we should + assume that the server began processing it. + """ + return isinstance(err, (ReadTimeoutError, ProtocolError)) + + def is_forced_retry(self, method, status_code): + """ Is this method/response retryable? (Based on method/codes whitelists) + """ + if self.method_whitelist and method.upper() not in self.method_whitelist: + return False + + return self.status_forcelist and status_code in self.status_forcelist + + def is_exhausted(self): + """ Are we out of retries? 
""" + retry_counts = (self.total, self.connect, self.read, self.redirect) + retry_counts = list(filter(None, retry_counts)) + if not retry_counts: + return False + + return min(retry_counts) < 0 + + def increment(self, method=None, url=None, response=None, error=None, _pool=None, _stacktrace=None): + """ Return a new Retry object with incremented retry counters. + + :param response: A response object, or None, if the server did not + return a response. + :type response: :class:`~urllib3.response.HTTPResponse` + :param Exception error: An error encountered during the request, or + None if the response was received successfully. + + :return: A new ``Retry`` object. + """ + if self.total is False and error: + # Disabled, indicate to re-raise the error. + raise six.reraise(type(error), error, _stacktrace) + + total = self.total + if total is not None: + total -= 1 + + _observed_errors = self._observed_errors + connect = self.connect + read = self.read + redirect = self.redirect + cause = 'unknown' + + if error and self._is_connection_error(error): + # Connect retry? + if connect is False: + raise six.reraise(type(error), error, _stacktrace) + elif connect is not None: + connect -= 1 + _observed_errors += 1 + + elif error and self._is_read_error(error): + # Read retry? + if read is False: + raise six.reraise(type(error), error, _stacktrace) + elif read is not None: + read -= 1 + _observed_errors += 1 + + elif response and response.get_redirect_location(): + # Redirect retry? + if redirect is not None: + redirect -= 1 + cause = 'too many redirects' + + else: + # Incrementing because of a server error like a 500 in + # status_forcelist and a the given method is in the whitelist + _observed_errors += 1 + cause = ResponseError.GENERIC_ERROR + if response and response.status: + cause = ResponseError.SPECIFIC_ERROR.format( + status_code=response.status) + + new_retry = self.new( + total=total, + connect=connect, read=read, redirect=redirect, + _observed_errors=_observed_errors) + + if new_retry.is_exhausted(): + raise MaxRetryError(_pool, url, error or ResponseError(cause)) + + log.debug("Incremented Retry for (url='%s'): %r" % (url, new_retry)) + + return new_retry + + + def __repr__(self): + return ('{cls.__name__}(total={self.total}, connect={self.connect}, ' + 'read={self.read}, redirect={self.redirect})').format( + cls=type(self), self=self) + + +# For backwards compatibility (equivalent to pre-v1.9): +Retry.DEFAULT = Retry(3) diff --git a/awx/lib/site-packages/requests/packages/urllib3/util/ssl_.py b/awx/lib/site-packages/requests/packages/urllib3/util/ssl_.py index dee4b87629..a788b1b98c 100644 --- a/awx/lib/site-packages/requests/packages/urllib3/util/ssl_.py +++ b/awx/lib/site-packages/requests/packages/urllib3/util/ssl_.py @@ -4,18 +4,84 @@ from hashlib import md5, sha1 from ..exceptions import SSLError -try: # Test for SSL features - SSLContext = None - HAS_SNI = False +SSLContext = None +HAS_SNI = False +create_default_context = None - import ssl +import errno +import ssl + +try: # Test for SSL features from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23 - from ssl import SSLContext # Modern SSL? from ssl import HAS_SNI # Has SNI? 
except ImportError: pass +try: + from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION +except ImportError: + OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000 + OP_NO_COMPRESSION = 0x20000 + +try: + from ssl import _DEFAULT_CIPHERS +except ImportError: + _DEFAULT_CIPHERS = ( + 'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:' + 'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:ECDH+RC4:' + 'DH+RC4:RSA+RC4:!aNULL:!eNULL:!MD5' + ) + +try: + from ssl import SSLContext # Modern SSL? +except ImportError: + import sys + + class SSLContext(object): # Platform-specific: Python 2 & 3.1 + supports_set_ciphers = sys.version_info >= (2, 7) + + def __init__(self, protocol_version): + self.protocol = protocol_version + # Use default values from a real SSLContext + self.check_hostname = False + self.verify_mode = ssl.CERT_NONE + self.ca_certs = None + self.options = 0 + self.certfile = None + self.keyfile = None + self.ciphers = None + + def load_cert_chain(self, certfile, keyfile): + self.certfile = certfile + self.keyfile = keyfile + + def load_verify_locations(self, location): + self.ca_certs = location + + def set_ciphers(self, cipher_suite): + if not self.supports_set_ciphers: + raise TypeError( + 'Your version of Python does not support setting ' + 'a custom cipher suite. Please upgrade to Python ' + '2.7, 3.2, or later if you need this functionality.' + ) + self.ciphers = cipher_suite + + def wrap_socket(self, socket, server_hostname=None): + kwargs = { + 'keyfile': self.keyfile, + 'certfile': self.certfile, + 'ca_certs': self.ca_certs, + 'cert_reqs': self.verify_mode, + 'ssl_version': self.protocol, + } + if self.supports_set_ciphers: # Platform-specific: Python 2.7+ + return wrap_socket(socket, ciphers=self.ciphers, **kwargs) + else: # Platform-specific: Python 2.6 + return wrap_socket(socket, **kwargs) + + def assert_fingerprint(cert, fingerprint): """ Checks if given fingerprint matches the supplied certificate. @@ -34,10 +100,9 @@ def assert_fingerprint(cert, fingerprint): } fingerprint = fingerprint.replace(':', '').lower() + digest_length, odd = divmod(len(fingerprint), 2) - digest_length, rest = divmod(len(fingerprint), 2) - - if rest or digest_length not in hashfunc_map: + if odd or digest_length not in hashfunc_map: raise SSLError('Fingerprint is of invalid length.') # We need encode() here for py32; works on py2 and p33. @@ -92,42 +157,98 @@ def resolve_ssl_version(candidate): return candidate -if SSLContext is not None: # Python 3.2+ - def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, - ca_certs=None, server_hostname=None, - ssl_version=None): - """ - All arguments except `server_hostname` have the same meaning as for - :func:`ssl.wrap_socket` +def create_urllib3_context(ssl_version=None, cert_reqs=ssl.CERT_REQUIRED, + options=None, ciphers=None): + """All arguments have the same meaning as ``ssl_wrap_socket``. - :param server_hostname: - Hostname of the expected certificate - """ - context = SSLContext(ssl_version) - context.verify_mode = cert_reqs + By default, this function does a lot of the same work that + ``ssl.create_default_context`` does on Python 3.4+. 
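The ``divmod`` rewrite in ``assert_fingerprint`` above is easy to sanity-check by hand. A minimal sketch; the certificate bytes here are a stand-in, not a real DER blob::

    import binascii
    from hashlib import sha1

    cert = b'stand-in certificate bytes'  # placeholder, not a real cert
    digest = sha1(cert).digest()

    # An operator-supplied fingerprint: colon-separated, upper-case hex.
    fingerprint = ':'.join(sha1(cert).hexdigest().upper()[i:i + 2]
                           for i in range(0, 40, 2))

    # assert_fingerprint strips the colons and lowercases before comparing,
    # and uses divmod to reject odd-length input.
    normalized = fingerprint.replace(':', '').lower()
    digest_length, odd = divmod(len(normalized), 2)
    assert not odd and digest_length == sha1().digest_size
    assert binascii.hexlify(digest).decode('ascii') == normalized

``create_urllib3_context``, for its part, picks up where ``ssl.create_default_context`` leaves off.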
It: - # Disable TLS compression to migitate CRIME attack (issue #309) - OP_NO_COMPRESSION = 0x20000 - context.options |= OP_NO_COMPRESSION + - Disables SSLv2, SSLv3, and compression + - Sets a restricted set of server ciphers - if ca_certs: - try: - context.load_verify_locations(ca_certs) - # Py32 raises IOError - # Py33 raises FileNotFoundError - except Exception as e: # Reraise as SSLError + If you wish to enable SSLv3, you can do:: + + from urllib3.util import ssl_ + context = ssl_.create_urllib3_context() + context.options &= ~ssl_.OP_NO_SSLv3 + + You can do the same to enable compression (substituting ``COMPRESSION`` + for ``SSLv3`` in the last line above). + + :param ssl_version: + The desired protocol version to use. This will default to + PROTOCOL_SSLv23 which will negotiate the highest protocol that both + the server and your installation of OpenSSL support. + :param cert_reqs: + Whether to require the certificate verification. This defaults to + ``ssl.CERT_REQUIRED``. + :param options: + Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``, + ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``. + :param ciphers: + Which cipher suites to allow the server to select. + :returns: + Constructed SSLContext object with specified options + :rtype: SSLContext + """ + context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23) + + if options is None: + options = 0 + # SSLv2 is easily broken and is considered harmful and dangerous + options |= OP_NO_SSLv2 + # SSLv3 has several problems and is now dangerous + options |= OP_NO_SSLv3 + # Disable compression to prevent CRIME attacks for OpenSSL 1.0+ + # (issue #309) + options |= OP_NO_COMPRESSION + + context.options |= options + + if getattr(context, 'supports_set_ciphers', True): # Platform-specific: Python 2.6 + context.set_ciphers(ciphers or _DEFAULT_CIPHERS) + + context.verify_mode = cert_reqs + if getattr(context, 'check_hostname', None) is not None: # Platform-specific: Python 3.2 + context.check_hostname = (context.verify_mode == ssl.CERT_REQUIRED) + return context + + +def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, + ca_certs=None, server_hostname=None, + ssl_version=None, ciphers=None, ssl_context=None): + """ + All arguments except for server_hostname and ssl_context have the same + meaning as they do when using :func:`ssl.wrap_socket`. + + :param server_hostname: + When SNI is supported, the expected hostname of the certificate + :param ssl_context: + A pre-made :class:`SSLContext` object. If none is provided, one will + be created using :func:`create_urllib3_context`. + :param ciphers: + A string of ciphers we wish the client to support. This is not + supported on Python 2.6 as the ssl module does not support it. + """ + context = ssl_context + if context is None: + context = create_urllib3_context(ssl_version, cert_reqs, + ciphers=ciphers) + + if ca_certs: + try: + context.load_verify_locations(ca_certs) + except IOError as e: # Platform-specific: Python 2.6, 2.7, 3.2 + raise SSLError(e) + # Py33 raises FileNotFoundError which subclasses OSError + # These are not equivalent unless we check the errno attribute + except OSError as e: # Platform-specific: Python 3.3 and beyond + if e.errno == errno.ENOENT: raise SSLError(e) - if certfile: - # FIXME: This block needs a test. 
- context.load_cert_chain(certfile, keyfile) - if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI - return context.wrap_socket(sock, server_hostname=server_hostname) - return context.wrap_socket(sock) - -else: # Python 3.1 and earlier - def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, - ca_certs=None, server_hostname=None, - ssl_version=None): - return wrap_socket(sock, keyfile=keyfile, certfile=certfile, - ca_certs=ca_certs, cert_reqs=cert_reqs, - ssl_version=ssl_version) + raise + if certfile: + context.load_cert_chain(certfile, keyfile) + if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI + return context.wrap_socket(sock, server_hostname=server_hostname) + return context.wrap_socket(sock) diff --git a/awx/lib/site-packages/requests/packages/urllib3/util/timeout.py b/awx/lib/site-packages/requests/packages/urllib3/util/timeout.py index 4f947cb249..ea7027f3f5 100644 --- a/awx/lib/site-packages/requests/packages/urllib3/util/timeout.py +++ b/awx/lib/site-packages/requests/packages/urllib3/util/timeout.py @@ -1,32 +1,49 @@ +# The default socket timeout, used by httplib to indicate that no timeout was +# specified by the user from socket import _GLOBAL_DEFAULT_TIMEOUT import time from ..exceptions import TimeoutStateError +# A sentinel value to indicate that no timeout was specified by the user in +# urllib3 +_Default = object() def current_time(): """ - Retrieve the current time, this function is mocked out in unit testing. + Retrieve the current time. This function is mocked out in unit testing. """ return time.time() -_Default = object() -# The default timeout to use for socket connections. This is the attribute used -# by httplib to define the default timeout - - class Timeout(object): - """ - Utility object for storing timeout values. + """ Timeout configuration. - Example usage: + Timeouts can be defined as a default for a pool:: - .. code-block:: python + timeout = Timeout(connect=2.0, read=7.0) + http = PoolManager(timeout=timeout) + response = http.request('GET', 'http://example.com/') - timeout = urllib3.util.Timeout(connect=2.0, read=7.0) - pool = HTTPConnectionPool('www.google.com', 80, timeout=timeout) - pool.request(...) # Etc, etc + Or per-request (which overrides the default for the pool):: + + response = http.request('GET', 'http://example.com/', timeout=Timeout(10)) + + Timeouts can be disabled by setting all the parameters to ``None``:: + + no_timeout = Timeout(connect=None, read=None) + response = http.request('GET', 'http://example.com/, timeout=no_timeout) + + + :param total: + This combines the connect and read timeouts into one; the read timeout + will be set to the time leftover from the connect attempt. In the + event that both a connect timeout and a total are specified, or a read + timeout and a total are specified, the shorter timeout will be applied. + + Defaults to None. + + :type total: integer, float, or None :param connect: The maximum amount of time to wait for a connection attempt to a server @@ -47,25 +64,15 @@ class Timeout(object): :type read: integer, float, or None - :param total: - This combines the connect and read timeouts into one; the read timeout - will be set to the time leftover from the connect attempt. In the - event that both a connect timeout and a total are specified, or a read - timeout and a total are specified, the shorter timeout will be applied. - - Defaults to None. - - :type total: integer, float, or None - .. 
note:: Many factors can affect the total amount of time for urllib3 to return - an HTTP response. Specifically, Python's DNS resolver does not obey the - timeout specified on the socket. Other factors that can affect total - request time include high CPU load, high swap, the program running at a - low priority level, or other behaviors. The observed running time for - urllib3 to return a response may be greater than the value passed to - `total`. + an HTTP response. + + For example, Python's DNS resolver does not obey the timeout specified + on the socket. Other factors that can affect total request time include + high CPU load, high swap, the program running at a low priority level, + or other behaviors. In addition, the read and total timeouts only measure the time between read operations on the socket connecting the client and the server, @@ -73,8 +80,8 @@ class Timeout(object): response. For most requests, the timeout is raised because the server has not sent the first byte in the specified time. This is not always the case; if a server streams one byte every fifteen seconds, a timeout - of 20 seconds will not ever trigger, even though the request will - take several minutes to complete. + of 20 seconds will not trigger, even though the request will take + several minutes to complete. If your goal is to cut off any request after a set amount of wall clock time, consider having a second "watcher" thread to cut off a slow @@ -94,17 +101,16 @@ class Timeout(object): return '%s(connect=%r, read=%r, total=%r)' % ( type(self).__name__, self._connect, self._read, self.total) - @classmethod def _validate_timeout(cls, value, name): - """ Check that a timeout attribute is valid + """ Check that a timeout attribute is valid. :param value: The timeout value to validate - :param name: The name of the timeout attribute to validate. This is used - for clear error messages - :return: the value - :raises ValueError: if the type is not an integer or a float, or if it - is a numeric value less than zero + :param name: The name of the timeout attribute to validate. This is + used to specify in error messages. + :return: The validated and casted version of the given value. + :raises ValueError: If the type is not an integer or a float, or if it + is a numeric value less than zero. """ if value is _Default: return cls.DEFAULT_TIMEOUT @@ -123,7 +129,7 @@ class Timeout(object): raise ValueError("Attempted to set %s timeout to %s, but the " "timeout cannot be set to a value less " "than 0." % (name, value)) - except TypeError: # Python 3 + except TypeError: # Python 3 raise ValueError("Timeout value %s was %s, but it must be an " "int or float." % (name, value)) @@ -135,12 +141,12 @@ class Timeout(object): The timeout value used by httplib.py sets the same timeout on the connect(), and recv() socket requests. This creates a :class:`Timeout` - object that sets the individual timeouts to the ``timeout`` value passed - to this function. + object that sets the individual timeouts to the ``timeout`` value + passed to this function. - :param timeout: The legacy timeout value + :param timeout: The legacy timeout value. :type timeout: integer, float, sentinel default object, or None - :return: a Timeout object + :return: Timeout object :rtype: :class:`Timeout` """ return Timeout(read=timeout, connect=timeout) @@ -174,7 +180,7 @@ class Timeout(object): def get_connect_duration(self): """ Gets the time elapsed since the call to :meth:`start_connect`. - :return: the elapsed time + :return: Elapsed time. 
:rtype: float :raises urllib3.exceptions.TimeoutStateError: if you attempt to get duration for a timer that hasn't been started. @@ -191,7 +197,7 @@ class Timeout(object): This will be a positive float or integer, the value None (never timeout), or the default system timeout. - :return: the connect timeout + :return: Connect timeout. :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None """ if self.total is None: @@ -214,7 +220,7 @@ class Timeout(object): established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be raised. - :return: the value to use for the read timeout + :return: Value to use for the read timeout. :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect` has not yet been called on this object. @@ -223,7 +229,7 @@ class Timeout(object): self.total is not self.DEFAULT_TIMEOUT and self._read is not None and self._read is not self.DEFAULT_TIMEOUT): - # in case the connect timeout has not yet been established. + # In case the connect timeout has not yet been established. if self._start_connect is None: return self._read return max(0, min(self.total - self.get_connect_duration(), diff --git a/awx/lib/site-packages/requests/packages/urllib3/util/url.py b/awx/lib/site-packages/requests/packages/urllib3/util/url.py index 362d216089..b2ec834fe7 100644 --- a/awx/lib/site-packages/requests/packages/urllib3/util/url.py +++ b/awx/lib/site-packages/requests/packages/urllib3/util/url.py @@ -3,15 +3,20 @@ from collections import namedtuple from ..exceptions import LocationParseError -class Url(namedtuple('Url', ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment'])): +url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment'] + + +class Url(namedtuple('Url', url_attrs)): """ Datastructure for representing an HTTP URL. Used as a return value for :func:`parse_url`. """ slots = () - def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, query=None, fragment=None): - return super(Url, cls).__new__(cls, scheme, auth, host, port, path, query, fragment) + def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, + query=None, fragment=None): + return super(Url, cls).__new__(cls, scheme, auth, host, port, path, + query, fragment) @property def hostname(self): @@ -35,6 +40,48 @@ class Url(namedtuple('Url', ['scheme', 'auth', 'host', 'port', 'path', 'query', return '%s:%d' % (self.host, self.port) return self.host + @property + def url(self): + """ + Convert self into a url + + This function should more or less round-trip with :func:`.parse_url`. The + returned url may not be exactly the same as the url inputted to + :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls + with a blank port will have : removed). + + Example: :: + + >>> U = parse_url('http://google.com/mail/') + >>> U.url + 'http://google.com/mail/' + >>> Url('http', 'username:password', 'host.com', 80, + ... '/path', 'query', 'fragment').url + 'http://username:password@host.com:80/path?query#fragment' + """ + scheme, auth, host, port, path, query, fragment = self + url = '' + + # We use "is not None" we want things to happen with empty strings (or 0 port) + if scheme is not None: + url += scheme + '://' + if auth is not None: + url += auth + '@' + if host is not None: + url += host + if port is not None: + url += ':' + str(port) + if path is not None: + url += path + if query is not None: + url += '?' 
+ query + if fragment is not None: + url += '#' + fragment + + return url + + def __str__(self): + return self.url def split_first(s, delims): """ @@ -43,7 +90,7 @@ def split_first(s, delims): If not found, then the first part is the full input string. - Example: :: + Example:: >>> split_first('foo/bar?baz', '?/=') ('foo', 'bar?baz', '/') @@ -76,10 +123,10 @@ def parse_url(url): Partly backwards-compatible with :mod:`urlparse`. - Example: :: + Example:: >>> parse_url('http://google.com/mail/') - Url(scheme='http', host='google.com', port=None, path='/', ...) + Url(scheme='http', host='google.com', port=None, path='/mail/', ...) >>> parse_url('google.com:80') Url(scheme=None, host='google.com', port=80, path=None, ...) >>> parse_url('/foo?bar') @@ -91,6 +138,10 @@ def parse_url(url): # Additionally, this implementations does silly things to be optimal # on CPython. + if not url: + # Empty + return Url() + scheme = None auth = None host = None @@ -153,7 +204,6 @@ def parse_url(url): return Url(scheme, auth, host, port, path, query, fragment) - def get_host(url): """ Deprecated. Use :func:`.parse_url` instead. diff --git a/awx/lib/site-packages/requests/sessions.py b/awx/lib/site-packages/requests/sessions.py index df85a25c11..4f30696353 100644 --- a/awx/lib/site-packages/requests/sessions.py +++ b/awx/lib/site-packages/requests/sessions.py @@ -13,7 +13,7 @@ from collections import Mapping from datetime import datetime from .auth import _basic_auth_str -from .compat import cookielib, OrderedDict, urljoin, urlparse, builtin_str +from .compat import cookielib, OrderedDict, urljoin, urlparse from .cookies import ( cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies) from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT @@ -21,6 +21,7 @@ from .hooks import default_hooks, dispatch_hook from .utils import to_key_val_list, default_headers, to_native_string from .exceptions import ( TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError) +from .packages.urllib3._collections import RecentlyUsedContainer from .structures import CaseInsensitiveDict from .adapters import HTTPAdapter @@ -35,6 +36,8 @@ from .status_codes import codes # formerly defined here, reexposed here for backward compatibility from .models import REDIRECT_STATI +REDIRECT_CACHE_SIZE = 1000 + def merge_setting(request_setting, session_setting, dict_class=OrderedDict): """ @@ -91,10 +94,17 @@ class SessionRedirectMixin(object): """Receives a Response. Returns a generator of Responses.""" i = 0 + hist = [] # keep track of history while resp.is_redirect: prepared_request = req.copy() + if i > 0: + # Update history and keep track of redirects. + hist.append(resp) + new_hist = list(hist) + resp.history = new_hist + try: resp.content # Consume socket so it can be released except (ChunkedEncodingError, ContentDecodingError, RuntimeError): @@ -118,17 +128,20 @@ class SessionRedirectMixin(object): parsed = urlparse(url) url = parsed.geturl() - # Facilitate non-RFC2616-compliant 'location' headers + # Facilitate relative 'location' headers, as allowed by RFC 7231. # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') # Compliant with RFC3986, we percent encode the url. - if not urlparse(url).netloc: + if not parsed.netloc: url = urljoin(resp.url, requote_uri(url)) else: url = requote_uri(url) prepared_request.url = to_native_string(url) + # Cache the url, unless it redirects to itself. 
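Taken on its own, the caching rule in the comment above is small enough to trace. A minimal sketch, with a plain dict standing in for the size-bounded ``RecentlyUsedContainer`` the session actually uses, and placeholder URLs::

    redirect_cache = {}

    def remember(old_url, new_url, permanent):
        # Only permanent redirects are worth short-circuiting, and a URL
        # that redirects to itself is never worth caching.
        if permanent and old_url != new_url:
            redirect_cache[old_url] = new_url

    remember('http://example.com/old', 'http://example.com/new', True)
    remember('http://example.com/loop', 'http://example.com/loop', True)
    assert redirect_cache == {'http://example.com/old': 'http://example.com/new'}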
+ if resp.is_permanent_redirect and req.url != prepared_request.url: + self.redirect_cache[req.url] = prepared_request.url - # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.4 + # http://tools.ietf.org/html/rfc7231#section-6.4.4 if (resp.status_code == codes.see_other and method != 'HEAD'): method = 'GET' @@ -146,7 +159,7 @@ class SessionRedirectMixin(object): prepared_request.method = method # https://github.com/kennethreitz/requests/issues/1084 - if resp.status_code not in (codes.temporary, codes.resume): + if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect): if 'Content-Length' in prepared_request.headers: del prepared_request.headers['Content-Length'] @@ -261,9 +274,10 @@ class Session(SessionRedirectMixin): """ __attrs__ = [ - 'headers', 'cookies', 'auth', 'timeout', 'proxies', 'hooks', - 'params', 'verify', 'cert', 'prefetch', 'adapters', 'stream', - 'trust_env', 'max_redirects'] + 'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify', + 'cert', 'prefetch', 'adapters', 'stream', 'trust_env', + 'max_redirects', + ] def __init__(self): @@ -316,6 +330,9 @@ class Session(SessionRedirectMixin): self.mount('https://', HTTPAdapter()) self.mount('http://', HTTPAdapter()) + # Only store 1000 redirects to prevent using infinite memory + self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE) + def __enter__(self): return self @@ -353,6 +370,7 @@ class Session(SessionRedirectMixin): url=request.url, files=request.files, data=request.data, + json=request.json, headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict), params=merge_setting(request.params, self.params), auth=merge_setting(auth, self.auth), @@ -374,7 +392,8 @@ class Session(SessionRedirectMixin): hooks=None, stream=None, verify=None, - cert=None): + cert=None, + json=None): """Constructs a :class:`Request <Request>`, prepares it and sends it. Returns :class:`Response <Response>` object. @@ -384,17 +403,22 @@ class Session(SessionRedirectMixin): string for the :class:`Request`. :param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`. + :param json: (optional) json to send in the body of the + :class:`Request`. :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. - :param files: (optional) Dictionary of 'filename': file-like-objects + :param files: (optional) Dictionary of ``'filename': file-like-objects`` for multipart encoding upload. :param auth: (optional) Auth tuple or callable to enable Basic/Digest/Custom HTTP Auth. - :param timeout: (optional) Float describing the timeout of the - request in seconds. - :param allow_redirects: (optional) Boolean. Set to True by default. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a (`connect timeout, read + timeout <user/advanced.html#timeouts>`_) tuple. + :type timeout: float or tuple + :param allow_redirects: (optional) Set to True by default. + :type allow_redirects: bool :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. :param stream: (optional) whether to immediately download the response @@ -405,7 +429,7 @@ class Session(SessionRedirectMixin): If Tuple, ('cert', 'key') pair. """ - method = builtin_str(method) + method = to_native_string(method) # Create the Request. 
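The new ``json`` keyword accepted here, and passed into the ``Request`` constructed just below, means callers no longer serialize request bodies by hand. A usage sketch; the URL is a placeholder::

    import requests

    session = requests.Session()
    # The dict is serialized for us and the Content-Type header is set to
    # application/json.
    response = session.post('http://example.com/api', json={'key': 'value'})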
req = Request( @@ -414,6 +438,7 @@ class Session(SessionRedirectMixin): headers = headers, files = files, data = data or {}, + json = json, params = params or {}, auth = auth, cookies = cookies, @@ -423,36 +448,16 @@ class Session(SessionRedirectMixin): proxies = proxies or {} - # Gather clues from the surrounding environment. - if self.trust_env: - # Set environment's proxies. - env_proxies = get_environ_proxies(url) or {} - for (k, v) in env_proxies.items(): - proxies.setdefault(k, v) - - # Look for configuration. - if not verify and verify is not False: - verify = os.environ.get('REQUESTS_CA_BUNDLE') - - # Curl compatibility. - if not verify and verify is not False: - verify = os.environ.get('CURL_CA_BUNDLE') - - # Merge all the kwargs. - proxies = merge_setting(proxies, self.proxies) - stream = merge_setting(stream, self.stream) - verify = merge_setting(verify, self.verify) - cert = merge_setting(cert, self.cert) + settings = self.merge_environment_settings( + prep.url, proxies, stream, verify, cert + ) # Send the request. send_kwargs = { - 'stream': stream, 'timeout': timeout, - 'verify': verify, - 'cert': cert, - 'proxies': proxies, 'allow_redirects': allow_redirects, } + send_kwargs.update(settings) resp = self.send(prep, **send_kwargs) return resp @@ -487,15 +492,16 @@ class Session(SessionRedirectMixin): kwargs.setdefault('allow_redirects', False) return self.request('HEAD', url, **kwargs) - def post(self, url, data=None, **kwargs): + def post(self, url, data=None, json=None, **kwargs): """Sends a POST request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. + :param json: (optional) json to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. """ - return self.request('POST', url, data=data, **kwargs) + return self.request('POST', url, data=data, json=json, **kwargs) def put(self, url, data=None, **kwargs): """Sends a PUT request. Returns :class:`Response` object. @@ -540,6 +546,14 @@ class Session(SessionRedirectMixin): if not isinstance(request, PreparedRequest): raise ValueError('You can only send PreparedRequests.') + checked_urls = set() + while request.url in self.redirect_cache: + checked_urls.add(request.url) + new_url = self.redirect_cache.get(request.url) + if new_url in checked_urls: + break + request.url = new_url + # Set up variables needed for resolve_redirects and dispatching of hooks allow_redirects = kwargs.pop('allow_redirects', True) stream = kwargs.get('stream') @@ -597,6 +611,30 @@ class Session(SessionRedirectMixin): return r + def merge_environment_settings(self, url, proxies, stream, verify, cert): + """Check the environment and merge it with some settings.""" + # Gather clues from the surrounding environment. + if self.trust_env: + # Set environment's proxies. + env_proxies = get_environ_proxies(url) or {} + for (k, v) in env_proxies.items(): + proxies.setdefault(k, v) + + # Look for requests environment configuration and be compatible + # with cURL. + if verify is True or verify is None: + verify = (os.environ.get('REQUESTS_CA_BUNDLE') or + os.environ.get('CURL_CA_BUNDLE')) + + # Merge all the kwargs. 
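The ``checked_urls`` guard in ``send`` above is what keeps two URLs that permanently redirect to each other from being rewritten forever. The same loop, traced standalone with made-up URLs, before the kwarg merging continues below::

    redirect_cache = {'http://a.example/': 'http://b.example/',
                      'http://b.example/': 'http://a.example/'}

    url = 'http://a.example/'
    checked_urls = set()
    while url in redirect_cache:
        checked_urls.add(url)
        new_url = redirect_cache.get(url)
        if new_url in checked_urls:
            break  # cycle detected; stop rewriting
        url = new_url

    assert url == 'http://b.example/'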
+ proxies = merge_setting(proxies, self.proxies) + stream = merge_setting(stream, self.stream) + verify = merge_setting(verify, self.verify) + cert = merge_setting(cert, self.cert) + + return {'verify': verify, 'proxies': proxies, 'stream': stream, + 'cert': cert} + def get_adapter(self, url): """Returns the appropriate connnection adapter for the given URL.""" for (prefix, adapter) in self.adapters.items(): @@ -624,12 +662,19 @@ class Session(SessionRedirectMixin): self.adapters[key] = self.adapters.pop(key) def __getstate__(self): - return dict((attr, getattr(self, attr, None)) for attr in self.__attrs__) + state = dict((attr, getattr(self, attr, None)) for attr in self.__attrs__) + state['redirect_cache'] = dict(self.redirect_cache) + return state def __setstate__(self, state): + redirect_cache = state.pop('redirect_cache', {}) for attr, value in state.items(): setattr(self, attr, value) + self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE) + for redirect, to in redirect_cache.items(): + self.redirect_cache[redirect] = to + def session(): """Returns a :class:`Session` for context-management.""" diff --git a/awx/lib/site-packages/requests/status_codes.py b/awx/lib/site-packages/requests/status_codes.py index ed7a8660a6..e0887f210a 100644 --- a/awx/lib/site-packages/requests/status_codes.py +++ b/awx/lib/site-packages/requests/status_codes.py @@ -30,7 +30,8 @@ _codes = { 305: ('use_proxy',), 306: ('switch_proxy',), 307: ('temporary_redirect', 'temporary_moved', 'temporary'), - 308: ('resume_incomplete', 'resume'), + 308: ('permanent_redirect', + 'resume_incomplete', 'resume',), # These 2 to be removed in 3.0 # Client Error. 400: ('bad_request', 'bad'), diff --git a/awx/lib/site-packages/requests/structures.py b/awx/lib/site-packages/requests/structures.py index 9fd78187f2..3e5f2faa2e 100644 --- a/awx/lib/site-packages/requests/structures.py +++ b/awx/lib/site-packages/requests/structures.py @@ -8,30 +8,7 @@ Data structures that power Requests. """ -import os import collections -from itertools import islice - - -class IteratorProxy(object): - """docstring for IteratorProxy""" - def __init__(self, i): - self.i = i - # self.i = chain.from_iterable(i) - - def __iter__(self): - return self.i - - def __len__(self): - if hasattr(self.i, '__len__'): - return len(self.i) - if hasattr(self.i, 'len'): - return self.i.len - if hasattr(self.i, 'fileno'): - return os.fstat(self.i.fileno()).st_size - - def read(self, n): - return "".join(islice(self.i, None, n)) class CaseInsensitiveDict(collections.MutableMapping): @@ -46,7 +23,7 @@ class CaseInsensitiveDict(collections.MutableMapping): case of the last key to be set, and ``iter(instance)``, ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()`` will contain case-sensitive keys. However, querying and contains - testing is case insensitive: + testing is case insensitive:: cid = CaseInsensitiveDict() cid['Accept'] = 'application/json' diff --git a/awx/lib/site-packages/requests/utils.py b/awx/lib/site-packages/requests/utils.py index 68e50cf0a9..7467941447 100644 --- a/awx/lib/site-packages/requests/utils.py +++ b/awx/lib/site-packages/requests/utils.py @@ -19,6 +19,7 @@ import re import sys import socket import struct +import warnings from . import __version__ from . 
import certs @@ -114,7 +115,7 @@ def get_netrc_auth(url): def guess_filename(obj): """Tries to guess the filename of the given object.""" name = getattr(obj, 'name', None) - if name and name[0] != '<' and name[-1] != '>': + if name and isinstance(name, builtin_str) and name[0] != '<' and name[-1] != '>': return os.path.basename(name) @@ -287,6 +288,11 @@ def get_encodings_from_content(content): :param content: bytestring to extract encodings from. """ + warnings.warn(( + 'In requests 3.0, get_encodings_from_content will be removed. For ' + 'more information, please see the discussion on issue #2266. (This' + ' warning should only appear once.)'), + DeprecationWarning) charset_re = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I) pragma_re = re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I) @@ -351,12 +357,14 @@ def get_unicode_from_response(r): Tried: 1. charset from content-type - - 2. every encodings from ``<meta ... charset=XXX>`` - - 3. fall back and replace all unicode characters + 2. fall back and replace all unicode characters """ + warnings.warn(( + 'In requests 3.0, get_unicode_from_response will be removed. For ' + 'more information, please see the discussion on issue #2266. (This' + ' warning should only appear once.)'), + DeprecationWarning) tried_encodings = [] @@ -554,7 +562,8 @@ def default_headers(): return CaseInsensitiveDict({ 'User-Agent': default_user_agent(), 'Accept-Encoding': ', '.join(('gzip', 'deflate')), - 'Accept': '*/*' + 'Accept': '*/*', + 'Connection': 'keep-alive', }) @@ -569,7 +578,7 @@ def parse_header_links(value): replace_chars = " '\"" - for val in value.split(","): + for val in re.split(", *<", value): try: url, params = val.split(";", 1) except ValueError: @@ -671,3 +680,18 @@ def to_native_string(string, encoding='ascii'): out = string.decode(encoding) return out + + +def urldefragauth(url): + """ + Given a url remove the fragment and the authentication part + """ + scheme, netloc, path, params, query, fragment = urlparse(url) + + # see func:`prepend_scheme_if_needed` + if not netloc: + netloc, path = path, netloc + + netloc = netloc.rsplit('@', 1)[-1] + + return urlunparse((scheme, netloc, path, params, query, '')) diff --git a/awx/lib/site-packages/setuptools/__init__.py b/awx/lib/site-packages/setuptools/__init__.py index fc9b7b936c..8188f12528 100644 --- a/awx/lib/site-packages/setuptools/__init__.py +++ b/awx/lib/site-packages/setuptools/__init__.py @@ -1,16 +1,17 @@ """Extensions to the 'distutils' for large or complex distributions""" import os -import sys import distutils.core import distutils.filelist from distutils.core import Command as _Command from distutils.util import convert_path +from fnmatch import fnmatchcase import setuptools.version from setuptools.extension import Extension from setuptools.dist import Distribution, Feature, _get_unpatched from setuptools.depends import Require +from setuptools.compat import filterfalse __all__ = [ 'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require', @@ -27,33 +28,87 @@ run_2to3_on_doctests = True # Standard package names for fixer packages lib2to3_fixer_packages = ['lib2to3.fixes'] -def find_packages(where='.', exclude=()): - """Return a list all Python packages found within directory 'where' - 'where' should be supplied as a "cross-platform" (i.e. URL-style) path; it - will be converted to the appropriate local path syntax. 
'exclude' is a - sequence of package names to exclude; '*' can be used as a wildcard in the - names, such that 'foo.*' will exclude all subpackages of 'foo' (but not - 'foo' itself). - """ - out = [] - stack=[(convert_path(where), '')] - while stack: - where,prefix = stack.pop(0) - for name in os.listdir(where): - fn = os.path.join(where,name) - looks_like_package = ( - '.' not in name - and os.path.isdir(fn) - and os.path.isfile(os.path.join(fn, '__init__.py')) - ) - if looks_like_package: - out.append(prefix+name) - stack.append((fn, prefix+name+'.')) - for pat in list(exclude)+['ez_setup']: - from fnmatch import fnmatchcase - out = [item for item in out if not fnmatchcase(item,pat)] - return out +class PackageFinder(object): + @classmethod + def find(cls, where='.', exclude=(), include=('*',)): + """Return a list all Python packages found within directory 'where' + + 'where' should be supplied as a "cross-platform" (i.e. URL-style) + path; it will be converted to the appropriate local path syntax. + 'exclude' is a sequence of package names to exclude; '*' can be used + as a wildcard in the names, such that 'foo.*' will exclude all + subpackages of 'foo' (but not 'foo' itself). + + 'include' is a sequence of package names to include. If it's + specified, only the named packages will be included. If it's not + specified, all found packages will be included. 'include' can contain + shell style wildcard patterns just like 'exclude'. + + The list of included packages is built up first and then any + explicitly excluded packages are removed from it. + """ + out = cls._find_packages_iter(convert_path(where)) + out = cls.require_parents(out) + includes = cls._build_filter(*include) + excludes = cls._build_filter('ez_setup', '*__pycache__', *exclude) + out = filter(includes, out) + out = filterfalse(excludes, out) + return list(out) + + @staticmethod + def require_parents(packages): + """ + Exclude any apparent package that apparently doesn't include its + parent. + + For example, exclude 'foo.bar' if 'foo' is not present. + """ + found = [] + for pkg in packages: + base, sep, child = pkg.rpartition('.') + if base and base not in found: + continue + found.append(pkg) + yield pkg + + @staticmethod + def _all_dirs(base_path): + """ + Return all dirs in base_path, relative to base_path + """ + for root, dirs, files in os.walk(base_path, followlinks=True): + for dir in dirs: + yield os.path.relpath(os.path.join(root, dir), base_path) + + @classmethod + def _find_packages_iter(cls, base_path): + dirs = cls._all_dirs(base_path) + suitable = filterfalse(lambda n: '.' in n, dirs) + return ( + path.replace(os.path.sep, '.') + for path in suitable + if cls._looks_like_package(os.path.join(base_path, path)) + ) + + @staticmethod + def _looks_like_package(path): + return os.path.isfile(os.path.join(path, '__init__.py')) + + @staticmethod + def _build_filter(*patterns): + """ + Given a list of patterns, return a callable that will be true only if + the input matches one of the patterns. + """ + return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns) + +class PEP420PackageFinder(PackageFinder): + @staticmethod + def _looks_like_package(path): + return True + +find_packages = PackageFinder.find setup = distutils.core.setup @@ -83,7 +138,7 @@ def findall(dir = os.curdir): (relative to 'dir'). 
""" all_files = [] - for base, dirs, files in os.walk(dir): + for base, dirs, files in os.walk(dir, followlinks=True): if base==os.curdir or base.startswith(os.curdir+os.sep): base = base[2:] if base: @@ -92,7 +147,3 @@ def findall(dir = os.curdir): return all_files distutils.filelist.findall = findall # fix findall bug in distutils. - -# sys.dont_write_bytecode was introduced in Python 2.6. -_dont_write_bytecode = getattr(sys, 'dont_write_bytecode', - bool(os.environ.get("PYTHONDONTWRITEBYTECODE"))) diff --git a/awx/lib/site-packages/setuptools/archive_util.py b/awx/lib/site-packages/setuptools/archive_util.py index 1109f34677..b3c9fa5690 100644 --- a/awx/lib/site-packages/setuptools/archive_util.py +++ b/awx/lib/site-packages/setuptools/archive_util.py @@ -6,42 +6,25 @@ __all__ = [ "UnrecognizedFormat", "extraction_drivers", "unpack_directory", ] -import zipfile, tarfile, os, shutil, posixpath -from pkg_resources import ensure_directory +import zipfile +import tarfile +import os +import shutil +import posixpath +import contextlib +from pkg_resources import ensure_directory, ContextualZipFile from distutils.errors import DistutilsError class UnrecognizedFormat(DistutilsError): """Couldn't recognize the archive type""" def default_filter(src,dst): - """The default progress/filter callback; returns True for all files""" + """The default progress/filter callback; returns True for all files""" return dst - - - - - - - - - - - - - - - - - - - - - def unpack_archive(filename, extract_dir, progress_filter=default_filter, - drivers=None -): + drivers=None): """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat`` `progress_filter` is a function taking two arguments: a source path @@ -75,52 +58,33 @@ def unpack_archive(filename, extract_dir, progress_filter=default_filter, ) - - - - - def unpack_directory(filename, extract_dir, progress_filter=default_filter): """"Unpack" a directory, using the same interface as for archives Raises ``UnrecognizedFormat`` if `filename` is not a directory """ if not os.path.isdir(filename): - raise UnrecognizedFormat("%s is not a directory" % (filename,)) + raise UnrecognizedFormat("%s is not a directory" % filename) - paths = {filename:('',extract_dir)} + paths = { + filename: ('', extract_dir), + } for base, dirs, files in os.walk(filename): - src,dst = paths[base] + src, dst = paths[base] for d in dirs: - paths[os.path.join(base,d)] = src+d+'/', os.path.join(dst,d) + paths[os.path.join(base, d)] = src + d + '/', os.path.join(dst, d) for f in files: - name = src+f - target = os.path.join(dst,f) - target = progress_filter(src+f, target) + target = os.path.join(dst, f) + target = progress_filter(src + f, target) if not target: - continue # skip non-files + # skip non-files + continue ensure_directory(target) - f = os.path.join(base,f) + f = os.path.join(base, f) shutil.copyfile(f, target) shutil.copystat(f, target) - - - - - - - - - - - - - - - - def unpack_zipfile(filename, extract_dir, progress_filter=default_filter): """Unpack zip `filename` to `extract_dir` @@ -132,8 +96,7 @@ def unpack_zipfile(filename, extract_dir, progress_filter=default_filter): if not zipfile.is_zipfile(filename): raise UnrecognizedFormat("%s is not a zip file" % (filename,)) - z = zipfile.ZipFile(filename) - try: + with ContextualZipFile(filename) as z: for info in z.infolist(): name = info.filename @@ -152,17 +115,11 @@ def unpack_zipfile(filename, extract_dir, progress_filter=default_filter): # file ensure_directory(target) data = z.read(info.filename) - f = 
open(target,'wb') - try: + with open(target, 'wb') as f: f.write(data) - finally: - f.close() - del data unix_attributes = info.external_attr >> 16 if unix_attributes: os.chmod(target, unix_attributes) - finally: - z.close() def unpack_tarfile(filename, extract_dir, progress_filter=default_filter): @@ -178,19 +135,22 @@ def unpack_tarfile(filename, extract_dir, progress_filter=default_filter): raise UnrecognizedFormat( "%s is not a compressed or uncompressed tar file" % (filename,) ) - try: - tarobj.chown = lambda *args: None # don't do any chowning! + with contextlib.closing(tarobj): + # don't do any chowning! + tarobj.chown = lambda *args: None for member in tarobj: name = member.name # don't extract absolute paths or ones with .. in them if not name.startswith('/') and '..' not in name.split('/'): prelim_dst = os.path.join(extract_dir, *name.split('/')) - # resolve any links and to extract the link targets as normal files + # resolve any links and to extract the link targets as normal + # files while member is not None and (member.islnk() or member.issym()): linkpath = member.linkname if member.issym(): - linkpath = posixpath.join(posixpath.dirname(member.name), linkpath) + base = posixpath.dirname(member.name) + linkpath = posixpath.join(base, linkpath) linkpath = posixpath.normpath(linkpath) member = tarobj._getmember(linkpath) @@ -200,11 +160,11 @@ def unpack_tarfile(filename, extract_dir, progress_filter=default_filter): if final_dst.endswith(os.sep): final_dst = final_dst[:-1] try: - tarobj._extract_member(member, final_dst) # XXX Ugh + # XXX Ugh + tarobj._extract_member(member, final_dst) except tarfile.ExtractError: - pass # chown/chmod/mkfifo/mknode/makedev failed + # chown/chmod/mkfifo/mknode/makedev failed + pass return True - finally: - tarobj.close() extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile diff --git a/awx/lib/site-packages/setuptools/command/__init__.py b/awx/lib/site-packages/setuptools/command/__init__.py index 29c9d75ad1..f6dbc39c40 100644 --- a/awx/lib/site-packages/setuptools/command/__init__.py +++ b/awx/lib/site-packages/setuptools/command/__init__.py @@ -5,10 +5,11 @@ __all__ = [ 'register', 'bdist_wininst', 'upload_docs', ] -from setuptools.command import install_scripts +from distutils.command.bdist import bdist import sys -from distutils.command.bdist import bdist +from setuptools.command import install_scripts + if 'egg' not in bdist.format_commands: bdist.format_command['egg'] = ('bdist_egg', "Python .egg file") diff --git a/awx/lib/site-packages/setuptools/command/alias.py b/awx/lib/site-packages/setuptools/command/alias.py index 52384e1a28..452a9244ea 100644 --- a/awx/lib/site-packages/setuptools/command/alias.py +++ b/awx/lib/site-packages/setuptools/command/alias.py @@ -1,27 +1,26 @@ -import distutils, os -from setuptools import Command -from distutils.util import convert_path -from distutils import log -from distutils.errors import * +from distutils.errors import DistutilsOptionError + from setuptools.command.setopt import edit_config, option_base, config_file + def shquote(arg): """Quote an argument for later parsing by shlex.split()""" for c in '"', "'", "\\", "#": - if c in arg: return repr(arg) + if c in arg: + return repr(arg) if arg.split() != [arg]: return repr(arg) - return arg + return arg class alias(option_base): """Define a shortcut that invokes one or more commands""" - + description = "define a shortcut to invoke one or more commands" command_consumes_arguments = True user_options = [ - ('remove', 'r', 'remove 
(unset) the alias'), + ('remove', 'r', 'remove (unset) the alias'), ] + option_base.user_options boolean_options = option_base.boolean_options + ['remove'] @@ -49,7 +48,7 @@ class alias(option_base): print("setup.py alias", format_alias(alias, aliases)) return - elif len(self.args)==1: + elif len(self.args) == 1: alias, = self.args if self.remove: command = None @@ -61,9 +60,9 @@ class alias(option_base): return else: alias = self.args[0] - command = ' '.join(map(shquote,self.args[1:])) + command = ' '.join(map(shquote, self.args[1:])) - edit_config(self.filename, {'aliases': {alias:command}}, self.dry_run) + edit_config(self.filename, {'aliases': {alias: command}}, self.dry_run) def format_alias(name, aliases): @@ -76,7 +75,4 @@ def format_alias(name, aliases): source = '' else: source = '--filename=%r' % source - return source+name+' '+command - - - + return source + name + ' ' + command diff --git a/awx/lib/site-packages/setuptools/command/bdist_egg.py b/awx/lib/site-packages/setuptools/command/bdist_egg.py index c577615824..34fdeec21f 100644 --- a/awx/lib/site-packages/setuptools/command/bdist_egg.py +++ b/awx/lib/site-packages/setuptools/command/bdist_egg.py @@ -3,26 +3,33 @@ Build .egg distributions""" # This module should be kept compatible with Python 2.3 -import sys, os, marshal -from setuptools import Command +from distutils.errors import DistutilsSetupError from distutils.dir_util import remove_tree, mkpath +from distutils import log +from types import CodeType +import sys +import os +import marshal +import textwrap + +from pkg_resources import get_build_platform, Distribution, ensure_directory +from pkg_resources import EntryPoint +from setuptools.compat import basestring +from setuptools.extension import Library +from setuptools import Command + try: # Python 2.7 or >=3.2 from sysconfig import get_path, get_python_version + def _get_purelib(): return get_path("purelib") except ImportError: from distutils.sysconfig import get_python_lib, get_python_version + def _get_purelib(): return get_python_lib(False) -from distutils import log -from distutils.errors import DistutilsSetupError -from pkg_resources import get_build_platform, Distribution, ensure_directory -from pkg_resources import EntryPoint -from types import CodeType -from setuptools.compat import basestring, next -from setuptools.extension import Library def strip_module(filename): if '.' 
in filename: @@ -31,66 +38,45 @@ def strip_module(filename): filename = filename[:-6] return filename -def write_stub(resource, pyfile): - f = open(pyfile,'w') - f.write('\n'.join([ - "def __bootstrap__():", - " global __bootstrap__, __loader__, __file__", - " import sys, pkg_resources, imp", - " __file__ = pkg_resources.resource_filename(__name__,%r)" - % resource, - " __loader__ = None; del __bootstrap__, __loader__", - " imp.load_dynamic(__name__,__file__)", - "__bootstrap__()", - "" # terminal \n - ])) - f.close() -# stub __init__.py for packages distributed without one -NS_PKG_STUB = '__import__("pkg_resources").declare_namespace(__name__)' +def write_stub(resource, pyfile): + _stub_template = textwrap.dedent(""" + def __bootstrap__(): + global __bootstrap__, __loader__, __file__ + import sys, pkg_resources, imp + __file__ = pkg_resources.resource_filename(__name__, %r) + __loader__ = None; del __bootstrap__, __loader__ + imp.load_dynamic(__name__,__file__) + __bootstrap__() + """).lstrip() + with open(pyfile, 'w') as f: + f.write(_stub_template % resource) + class bdist_egg(Command): - description = "create an \"egg\" distribution" user_options = [ ('bdist-dir=', 'b', - "temporary directory for creating the distribution"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % get_build_platform()), + "temporary directory for creating the distribution"), + ('plat-name=', 'p', "platform name to embed in generated filenames " + "(default: %s)" % get_build_platform()), ('exclude-source-files', None, - "remove all .py files from the generated egg"), + "remove all .py files from the generated egg"), ('keep-temp', 'k', - "keep the pseudo-installation tree around after " + - "creating the distribution archive"), + "keep the pseudo-installation tree around after " + + "creating the distribution archive"), ('dist-dir=', 'd', - "directory to put final built distributions in"), + "directory to put final built distributions in"), ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), + "skip rebuilding everything (for testing/debugging)"), ] boolean_options = [ 'keep-temp', 'skip-build', 'exclude-source-files' ] - - - - - - - - - - - - - - - - - def initialize_options (self): + def initialize_options(self): self.bdist_dir = None self.plat_name = None self.keep_temp = 0 @@ -99,7 +85,6 @@ class bdist_egg(Command): self.egg_output = None self.exclude_source_files = None - def finalize_options(self): ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info") self.egg_info = ei_cmd.egg_info @@ -111,7 +96,7 @@ class bdist_egg(Command): if self.plat_name is None: self.plat_name = get_build_platform() - self.set_undefined_options('bdist',('dist_dir', 'dist_dir')) + self.set_undefined_options('bdist', ('dist_dir', 'dist_dir')) if self.egg_output is None: @@ -122,32 +107,25 @@ class bdist_egg(Command): self.distribution.has_ext_modules() and self.plat_name ).egg_name() - self.egg_output = os.path.join(self.dist_dir, basename+'.egg') - - - - - - - + self.egg_output = os.path.join(self.dist_dir, basename + '.egg') def do_install_data(self): # Hack for packages that install data to install's --install-lib self.get_finalized_command('install').install_lib = self.bdist_dir site_packages = os.path.normcase(os.path.realpath(_get_purelib())) - old, self.distribution.data_files = self.distribution.data_files,[] + old, self.distribution.data_files = self.distribution.data_files, [] for item in old: - if isinstance(item,tuple) and len(item)==2: + if 
isinstance(item, tuple) and len(item) == 2:
                 if os.path.isabs(item[0]):
                     realpath = os.path.realpath(item[0])
                     normalized = os.path.normcase(realpath)
-                    if normalized==site_packages or normalized.startswith(
-                        site_packages+os.sep
+                    if normalized == site_packages or normalized.startswith(
+                        site_packages + os.sep
                     ):
-                        item = realpath[len(site_packages)+1:], item[1]
-                    # XXX else: raise ???
+                        item = realpath[len(site_packages) + 1:], item[1]
+                        # XXX else: raise ???
             self.distribution.data_files.append(item)
         try:
@@ -156,22 +134,19 @@ class bdist_egg(Command):
         finally:
             self.distribution.data_files = old
 
-
     def get_outputs(self):
         return [self.egg_output]
 
-
-    def call_command(self,cmdname,**kw):
+    def call_command(self, cmdname, **kw):
         """Invoke reinitialized command `cmdname` with keyword args"""
         for dirname in INSTALL_DIRECTORY_ATTRS:
-            kw.setdefault(dirname,self.bdist_dir)
-        kw.setdefault('skip_build',self.skip_build)
+            kw.setdefault(dirname, self.bdist_dir)
+        kw.setdefault('skip_build', self.skip_build)
         kw.setdefault('dry_run', self.dry_run)
         cmd = self.reinitialize_command(cmdname, **kw)
         self.run_command(cmdname)
         return cmd
 
-
     def run(self):
         # Generate metadata first
         self.run_command("egg_info")
@@ -179,7 +154,8 @@ class bdist_egg(Command):
         # pull their data path from the install_lib command.
         log.info("installing library code to %s" % self.bdist_dir)
         instcmd = self.get_finalized_command('install')
-        old_root = instcmd.root; instcmd.root = None
+        old_root = instcmd.root
+        instcmd.root = None
         if self.distribution.has_c_libraries() and not self.skip_build:
             self.run_command('build_clib')
         cmd = self.call_command('install_lib', warn_dir=0)
@@ -188,17 +164,17 @@ class bdist_egg(Command):
         all_outputs, ext_outputs = self.get_ext_outputs()
         self.stubs = []
         to_compile = []
-        for (p,ext_name) in enumerate(ext_outputs):
-            filename,ext = os.path.splitext(ext_name)
-            pyfile = os.path.join(self.bdist_dir, strip_module(filename)+'.py')
+        for (p, ext_name) in enumerate(ext_outputs):
+            filename, ext = os.path.splitext(ext_name)
+            pyfile = os.path.join(self.bdist_dir, strip_module(filename) +
+                                  '.py')
             self.stubs.append(pyfile)
             log.info("creating stub loader for %s" % ext_name)
             if not self.dry_run:
                 write_stub(os.path.basename(ext_name), pyfile)
             to_compile.append(pyfile)
-            ext_outputs[p] = ext_name.replace(os.sep,'/')
+            ext_outputs[p] = ext_name.replace(os.sep, '/')
 
-        to_compile.extend(self.make_init_files())
         if to_compile:
             cmd.byte_compile(to_compile)
         if self.distribution.data_files:
@@ -206,12 +182,13 @@ class bdist_egg(Command):
 
         # Make the EGG-INFO directory
         archive_root = self.bdist_dir
-        egg_info = os.path.join(archive_root,'EGG-INFO')
+        egg_info = os.path.join(archive_root, 'EGG-INFO')
         self.mkpath(egg_info)
         if self.distribution.scripts:
             script_dir = os.path.join(egg_info, 'scripts')
             log.info("installing scripts to %s" % script_dir)
-            self.call_command('install_scripts',install_dir=script_dir,no_ep=1)
+            self.call_command('install_scripts', install_dir=script_dir,
+                              no_ep=1)
 
         self.copy_metadata_to(egg_info)
         native_libs = os.path.join(egg_info, "native_libs.txt")
@@ -229,10 +206,10 @@ class bdist_egg(Command):
                 os.unlink(native_libs)
 
         write_safety_flag(
-            os.path.join(archive_root,'EGG-INFO'), self.zip_safe()
+            os.path.join(archive_root, 'EGG-INFO'), self.zip_safe()
         )
 
-        if os.path.exists(os.path.join(self.egg_info,'depends.txt')):
+        if os.path.exists(os.path.join(self.egg_info, 'depends.txt')):
             log.warn(
                 "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
                 "Use the install_requires/extras_require setup() args instead."
@@ -243,61 +220,33 @@ class bdist_egg(Command):
 
         # Make the archive
         make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
-                  dry_run=self.dry_run, mode=self.gen_header())
+                     dry_run=self.dry_run, mode=self.gen_header())
         if not self.keep_temp:
             remove_tree(self.bdist_dir, dry_run=self.dry_run)
 
         # Add to 'Distribution.dist_files' so that the "upload" command works
-        getattr(self.distribution,'dist_files',[]).append(
-            ('bdist_egg',get_python_version(),self.egg_output))
-
-
-
+        getattr(self.distribution, 'dist_files', []).append(
+            ('bdist_egg', get_python_version(), self.egg_output))
 
     def zap_pyfiles(self):
         log.info("Removing .py files from temporary directory")
-        for base,dirs,files in walk_egg(self.bdist_dir):
+        for base, dirs, files in walk_egg(self.bdist_dir):
             for name in files:
                 if name.endswith('.py'):
-                    path = os.path.join(base,name)
+                    path = os.path.join(base, name)
                     log.debug("Deleting %s", path)
                     os.unlink(path)
 
     def zip_safe(self):
-        safe = getattr(self.distribution,'zip_safe',None)
+        safe = getattr(self.distribution, 'zip_safe', None)
         if safe is not None:
             return safe
         log.warn("zip_safe flag not set; analyzing archive contents...")
         return analyze_egg(self.bdist_dir, self.stubs)
 
-    def make_init_files(self):
-        """Create missing package __init__ files"""
-        init_files = []
-        for base,dirs,files in walk_egg(self.bdist_dir):
-            if base==self.bdist_dir:
-                # don't put an __init__ in the root
-                continue
-            for name in files:
-                if name.endswith('.py'):
-                    if '__init__.py' not in files:
-                        pkg = base[len(self.bdist_dir)+1:].replace(os.sep,'.')
-                        if self.distribution.has_contents_for(pkg):
-                            log.warn("Creating missing __init__.py for %s",pkg)
-                            filename = os.path.join(base,'__init__.py')
-                            if not self.dry_run:
-                                f = open(filename,'w'); f.write(NS_PKG_STUB)
-                                f.close()
-                            init_files.append(filename)
-                    break
-            else:
-                # not a package, don't traverse to subdirectories
-                dirs[:] = []
-
-        return init_files
-
     def gen_header(self):
         epm = EntryPoint.parse_map(self.distribution.entry_points or '')
-        ep = epm.get('setuptools.installation',{}).get('eggsecutable')
+        ep = epm.get('setuptools.installation', {}).get('eggsecutable')
         if ep is None:
             return 'w'  # not an eggsecutable, do it the usual way.
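For context: gen_header() only emits an executable shell header when the project declares an 'eggsecutable' entry point in the 'setuptools.installation' group, as the lookup above shows. A minimal sketch of a setup.py that opts in (the project and module names here are hypothetical, not part of the patch):

```python
# Hypothetical setup.py; 'mytool' and 'mytool:main' are placeholder names.
from setuptools import setup

setup(
    name='mytool',
    version='1.0',
    py_modules=['mytool'],
    entry_points={
        # gen_header() checks this group for an entry named 'eggsecutable';
        # if present, the built .egg is prefixed with a shell-script header
        # so the zip file can be executed directly.
        'setuptools.installation': [
            'eggsecutable = mytool:main',
        ],
    },
)
```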
@@ -325,7 +274,6 @@ class bdist_egg(Command):
             '  echo Please rename it back to %(basename)s and try again.\n'
             '  exec false\n'
             'fi\n'
-
         ) % locals()
 
         if not self.dry_run:
@@ -335,13 +283,12 @@ class bdist_egg(Command):
             f.close()
         return 'a'
 
-
     def copy_metadata_to(self, target_dir):
         "Copy metadata (egg info) to the target_dir"
         # normalize the path (so that a forward-slash in egg_info will
         # match using startswith below)
         norm_egg_info = os.path.normpath(self.egg_info)
-        prefix = os.path.join(norm_egg_info,'')
+        prefix = os.path.join(norm_egg_info, '')
         for path in self.ei_cmd.filelist.files:
             if path.startswith(prefix):
                 target = os.path.join(target_dir, path[len(prefix):])
@@ -354,23 +301,24 @@ class bdist_egg(Command):
         all_outputs = []
         ext_outputs = []
 
-        paths = {self.bdist_dir:''}
+        paths = {self.bdist_dir: ''}
         for base, dirs, files in os.walk(self.bdist_dir):
             for filename in files:
                 if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
-                    all_outputs.append(paths[base]+filename)
+                    all_outputs.append(paths[base] + filename)
             for filename in dirs:
-                paths[os.path.join(base,filename)] = paths[base]+filename+'/'
+                paths[os.path.join(base, filename)] = (paths[base] +
+                                                       filename + '/')
 
         if self.distribution.has_ext_modules():
             build_cmd = self.get_finalized_command('build_ext')
             for ext in build_cmd.extensions:
-                if isinstance(ext,Library):
+                if isinstance(ext, Library):
                     continue
                 fullname = build_cmd.get_ext_fullname(ext.name)
                 filename = build_cmd.get_ext_filename(fullname)
                 if not os.path.basename(filename).startswith('dl-'):
-                    if os.path.exists(os.path.join(self.bdist_dir,filename)):
+                    if os.path.exists(os.path.join(self.bdist_dir, filename)):
                         ext_outputs.append(filename)
 
         return all_outputs, ext_outputs
 
@@ -379,24 +327,24 @@
 
 NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())
 
-
-
 def walk_egg(egg_dir):
     """Walk an unpacked egg's contents, skipping the metadata directory"""
     walker = os.walk(egg_dir)
-    base,dirs,files = next(walker)
+    base, dirs, files = next(walker)
     if 'EGG-INFO' in dirs:
         dirs.remove('EGG-INFO')
-    yield base,dirs,files
+    yield base, dirs, files
     for bdf in walker:
         yield bdf
 
+
 def analyze_egg(egg_dir, stubs):
     # check for existing flag in EGG-INFO
-    for flag,fn in safety_flags.items():
-        if os.path.exists(os.path.join(egg_dir,'EGG-INFO',fn)):
+    for flag, fn in safety_flags.items():
+        if os.path.exists(os.path.join(egg_dir, 'EGG-INFO', fn)):
             return flag
-    if not can_scan(): return False
+    if not can_scan():
+        return False
     safe = True
     for base, dirs, files in walk_egg(egg_dir):
         for name in files:
@@ -407,35 +355,42 @@ def analyze_egg(egg_dir, stubs):
             safe = scan_module(egg_dir, base, name, stubs) and safe
     return safe
 
+
 def write_safety_flag(egg_dir, safe):
     # Write or remove zip safety flag file(s)
-    for flag,fn in safety_flags.items():
+    for flag, fn in safety_flags.items():
         fn = os.path.join(egg_dir, fn)
         if os.path.exists(fn):
             if safe is None or bool(safe) != flag:
                 os.unlink(fn)
-        elif safe is not None and bool(safe)==flag:
-            f=open(fn,'wt'); f.write('\n'); f.close()
+        elif safe is not None and bool(safe) == flag:
+            f = open(fn, 'wt')
+            f.write('\n')
+            f.close()
+
 
 safety_flags = {
     True: 'zip-safe',
     False: 'not-zip-safe',
 }
 
+
 def scan_module(egg_dir, base, name, stubs):
     """Check whether module possibly uses unsafe-for-zipfile stuff"""
-    filename = os.path.join(base,name)
+    filename = os.path.join(base, name)
     if filename[:-1] in stubs:
-        return True # Extension module
-    pkg = base[len(egg_dir)+1:].replace(os.sep,'.')
-    module = pkg+(pkg and '.' or '')+os.path.splitext(name)[0]
+        return True  # Extension module
+    pkg = base[len(egg_dir) + 1:].replace(os.sep, '.')
+    module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0]
     if sys.version_info < (3, 3):
-        skip = 8 # skip magic & date
+        skip = 8  # skip magic & date
     else:
         skip = 12  # skip magic & date & file size
-    f = open(filename,'rb'); f.read(skip)
-    code = marshal.load(f); f.close()
+    f = open(filename, 'rb')
+    f.read(skip)
+    code = marshal.load(f)
+    f.close()
     safe = True
     symbols = dict.fromkeys(iter_symbols(code))
     for bad in ['__file__', '__path__']:
@@ -452,21 +407,24 @@ def scan_module(egg_dir, base, name, stubs):
             log.warn("%s: module MAY be using inspect.%s", module, bad)
             safe = False
     if '__name__' in symbols and '__main__' in symbols and '.' not in module:
-        if sys.version[:3]=="2.4": # -m works w/zipfiles in 2.5
+        if sys.version[:3] == "2.4":  # -m works w/zipfiles in 2.5
             log.warn("%s: top-level module may be 'python -m' script", module)
             safe = False
     return safe
 
+
 def iter_symbols(code):
     """Yield names and strings used by `code` and its nested code objects"""
-    for name in code.co_names: yield name
+    for name in code.co_names:
+        yield name
     for const in code.co_consts:
-        if isinstance(const,basestring):
+        if isinstance(const, basestring):
             yield const
-        elif isinstance(const,CodeType):
+        elif isinstance(const, CodeType):
             for name in iter_symbols(const):
                 yield name
 
+
 def can_scan():
     if not sys.platform.startswith('java') and sys.platform != 'cli':
         # CPython, PyPy, etc.
@@ -475,39 +433,6 @@ def can_scan():
     log.warn("Please ask the author to include a 'zip_safe'"
              " setting (either True or False) in the package's setup.py")
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
 # Attribute names of options for commands that might need to be convinced to
 # install to the egg build directory
 
@@ -515,9 +440,9 @@ INSTALL_DIRECTORY_ATTRS = [
     'install_lib', 'install_dir', 'install_data', 'install_base'
 ]
 
+
 def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
-    mode='w'
-):
+                 mode='w'):
     """Create a zip file from all the files under 'base_dir'. The output
     zip file will be named 'base_dir' + ".zip". Uses either the "zipfile"
     Python module (if available) or the InfoZIP "zip" utility (if installed
@@ -525,6 +450,7 @@ def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
     raises DistutilsExecError. Returns the name of the output zip file.
""" import zipfile + mkpath(os.path.dirname(zip_filename), dry_run=dry_run) log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir) @@ -532,13 +458,14 @@ def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None, for name in names: path = os.path.normpath(os.path.join(dirname, name)) if os.path.isfile(path): - p = path[len(base_dir)+1:] + p = path[len(base_dir) + 1:] if not dry_run: z.write(path, p) log.debug("adding '%s'" % p) if compress is None: - compress = (sys.version>="2.4") # avoid 2.3 zipimport bug when 64 bits + # avoid 2.3 zipimport bug when 64 bits + compress = (sys.version >= "2.4") compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)] if not dry_run: @@ -550,4 +477,3 @@ def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None, for dirname, dirs, files in os.walk(base_dir): visit(None, dirname, files) return zip_filename -# diff --git a/awx/lib/site-packages/setuptools/command/bdist_rpm.py b/awx/lib/site-packages/setuptools/command/bdist_rpm.py index 8c48da3559..70730927ec 100644 --- a/awx/lib/site-packages/setuptools/command/bdist_rpm.py +++ b/awx/lib/site-packages/setuptools/command/bdist_rpm.py @@ -1,51 +1,30 @@ -# This is just a kludge so that bdist_rpm doesn't guess wrong about the -# distribution name and version, if the egg_info command is going to alter -# them, another kludge to allow you to build old-style non-egg RPMs, and -# finally, a kludge to track .rpm files for uploading when run on Python <2.5. +import distutils.command.bdist_rpm as orig -from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm -import sys, os -class bdist_rpm(_bdist_rpm): +class bdist_rpm(orig.bdist_rpm): + """ + Override the default bdist_rpm behavior to do the following: - def initialize_options(self): - _bdist_rpm.initialize_options(self) - self.no_egg = None - - if sys.version<"2.5": - # Track for uploading any .rpm file(s) moved to self.dist_dir - def move_file(self, src, dst, level=1): - _bdist_rpm.move_file(self, src, dst, level) - if dst==self.dist_dir and src.endswith('.rpm'): - getattr(self.distribution,'dist_files',[]).append( - ('bdist_rpm', - src.endswith('.src.rpm') and 'any' or sys.version[:3], - os.path.join(dst, os.path.basename(src))) - ) + 1. Run egg_info to ensure the name and version are properly calculated. + 2. Always run 'install' using --single-version-externally-managed to + disable eggs in RPM distributions. + 3. Replace dash with underscore in the version numbers for better RPM + compatibility. 
+ """ def run(self): - self.run_command('egg_info') # ensure distro name is up-to-date - _bdist_rpm.run(self) - - - - - - - - - - - + # ensure distro name is up-to-date + self.run_command('egg_info') + orig.bdist_rpm.run(self) def _make_spec_file(self): version = self.distribution.get_version() - rpmversion = version.replace('-','_') - spec = _bdist_rpm._make_spec_file(self) - line23 = '%define version '+version - line24 = '%define version '+rpmversion - spec = [ + rpmversion = version.replace('-', '_') + spec = orig.bdist_rpm._make_spec_file(self) + line23 = '%define version ' + version + line24 = '%define version ' + rpmversion + spec = [ line.replace( "Source0: %{name}-%{version}.tar", "Source0: %{name}-%{unmangled_version}.tar" @@ -55,28 +34,10 @@ class bdist_rpm(_bdist_rpm): ).replace( "%setup", "%setup -n %{name}-%{unmangled_version}" - ).replace(line23,line24) + ).replace(line23, line24) for line in spec ] - spec.insert(spec.index(line24)+1, "%define unmangled_version "+version) + insert_loc = spec.index(line24) + 1 + unmangled_version = "%define unmangled_version " + version + spec.insert(insert_loc, unmangled_version) return spec - - - - - - - - - - - - - - - - - - - - diff --git a/awx/lib/site-packages/setuptools/command/bdist_wininst.py b/awx/lib/site-packages/setuptools/command/bdist_wininst.py index e8521f834c..073de97b46 100644 --- a/awx/lib/site-packages/setuptools/command/bdist_wininst.py +++ b/awx/lib/site-packages/setuptools/command/bdist_wininst.py @@ -1,82 +1,21 @@ -from distutils.command.bdist_wininst import bdist_wininst as _bdist_wininst -import os, sys +import distutils.command.bdist_wininst as orig -class bdist_wininst(_bdist_wininst): - _good_upload = _bad_upload = None - def create_exe(self, arcname, fullname, bitmap=None): - _bdist_wininst.create_exe(self, arcname, fullname, bitmap) - installer_name = self.get_installer_filename(fullname) - if self.target_version: - pyversion = self.target_version - # fix 2.5+ bdist_wininst ignoring --target-version spec - self._bad_upload = ('bdist_wininst', 'any', installer_name) - else: - pyversion = 'any' - self._good_upload = ('bdist_wininst', pyversion, installer_name) - - def _fix_upload_names(self): - good, bad = self._good_upload, self._bad_upload - dist_files = getattr(self.distribution, 'dist_files', []) - if bad in dist_files: - dist_files.remove(bad) - if good not in dist_files: - dist_files.append(good) - - def reinitialize_command (self, command, reinit_subcommands=0): +class bdist_wininst(orig.bdist_wininst): + def reinitialize_command(self, command, reinit_subcommands=0): + """ + Supplement reinitialize_command to work around + http://bugs.python.org/issue20819 + """ cmd = self.distribution.reinitialize_command( command, reinit_subcommands) if command in ('install', 'install_lib'): - cmd.install_lib = None # work around distutils bug + cmd.install_lib = None return cmd def run(self): self._is_running = True try: - _bdist_wininst.run(self) - self._fix_upload_names() + orig.bdist_wininst.run(self) finally: self._is_running = False - - - if not hasattr(_bdist_wininst, 'get_installer_filename'): - def get_installer_filename(self, fullname): - # Factored out to allow overriding in subclasses - if self.target_version: - # if we create an installer for a specific python version, - # it's better to include this in the name - installer_name = os.path.join(self.dist_dir, - "%s.win32-py%s.exe" % - (fullname, self.target_version)) - else: - installer_name = os.path.join(self.dist_dir, - "%s.win32.exe" % fullname) - return 
installer_name - # get_installer_filename() - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/awx/lib/site-packages/setuptools/command/build_ext.py b/awx/lib/site-packages/setuptools/command/build_ext.py index 50a039ce50..e4b2c593f3 100644 --- a/awx/lib/site-packages/setuptools/command/build_ext.py +++ b/awx/lib/site-packages/setuptools/command/build_ext.py @@ -1,25 +1,30 @@ from distutils.command.build_ext import build_ext as _du_build_ext +from distutils.file_util import copy_file +from distutils.ccompiler import new_compiler +from distutils.sysconfig import customize_compiler +from distutils.errors import DistutilsError +from distutils import log +import os +import sys +import itertools + +from setuptools.extension import Library + try: # Attempt to use Pyrex for building extensions, if available from Pyrex.Distutils.build_ext import build_ext as _build_ext except ImportError: _build_ext = _du_build_ext -import os, sys -from distutils.file_util import copy_file -from setuptools.extension import Library -from distutils.ccompiler import new_compiler -from distutils.sysconfig import customize_compiler try: # Python 2.7 or >=3.2 from sysconfig import _CONFIG_VARS except ImportError: from distutils.sysconfig import get_config_var + get_config_var("LDSHARED") # make sure _config_vars is initialized del get_config_var from distutils.sysconfig import _config_vars as _CONFIG_VARS -from distutils import log -from distutils.errors import * have_rtld = False use_stubs = False @@ -29,20 +34,13 @@ if sys.platform == "darwin": use_stubs = True elif os.name != 'nt': try: - from dl import RTLD_NOW - have_rtld = True - use_stubs = True + import dl + use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW') except ImportError: pass -def if_dl(s): - if have_rtld: - return s - return '' - - - +if_dl = lambda s: s if have_rtld else '' class build_ext(_build_ext): @@ -62,8 +60,9 @@ class build_ext(_build_ext): modpath = fullname.split('.') package = '.'.join(modpath[:-1]) package_dir = build_py.get_package_dir(package) - dest_filename = os.path.join(package_dir,os.path.basename(filename)) - src_filename = os.path.join(self.build_lib,filename) + dest_filename = os.path.join(package_dir, + os.path.basename(filename)) + src_filename = os.path.join(self.build_lib, filename) # Always copy, even if source is older than destination, to ensure # that the right extensions for the current Python/platform are @@ -75,8 +74,8 @@ class build_ext(_build_ext): if ext._needs_stub: self.write_stub(package_dir or os.curdir, ext, True) - - if _build_ext is not _du_build_ext and not hasattr(_build_ext,'pyrex_sources'): + if _build_ext is not _du_build_ext and not hasattr(_build_ext, + 'pyrex_sources'): # Workaround for problems using some Pyrex versions w/SWIG and/or 2.4 def swig_sources(self, sources, *otherargs): # first do any Pyrex processing @@ -84,18 +83,16 @@ class build_ext(_build_ext): # Then do any actual SWIG stuff on the remainder return _du_build_ext.swig_sources(self, sources, *otherargs) - - def get_ext_filename(self, fullname): - filename = _build_ext.get_ext_filename(self,fullname) + filename = _build_ext.get_ext_filename(self, fullname) if fullname in self.ext_map: ext = self.ext_map[fullname] - if isinstance(ext,Library): + if isinstance(ext, Library): fn, ext = os.path.splitext(filename) - return self.shlib_compiler.library_filename(fn,libtype) + return self.shlib_compiler.library_filename(fn, libtype) elif use_stubs and ext._links_to_dynamic: - d,fn = os.path.split(filename) - return 
os.path.join(d,'dl-'+fn) + d, fn = os.path.split(filename) + return os.path.join(d, 'dl-' + fn) return filename def initialize_options(self): @@ -109,7 +106,7 @@ class build_ext(_build_ext): self.extensions = self.extensions or [] self.check_extensions_list(self.extensions) self.shlibs = [ext for ext in self.extensions - if isinstance(ext,Library)] + if isinstance(ext, Library)] if self.shlibs: self.setup_shlib_compiler() for ext in self.extensions: @@ -122,11 +119,12 @@ class build_ext(_build_ext): # XXX what to do with conflicts? self.ext_map[fullname.split('.')[-1]] = ext - ltd = ext._links_to_dynamic = \ - self.shlibs and self.links_to_dynamic(ext) or False - ext._needs_stub = ltd and use_stubs and not isinstance(ext,Library) + ltd = self.shlibs and self.links_to_dynamic(ext) or False + ns = ltd and use_stubs and not isinstance(ext, Library) + ext._links_to_dynamic = ltd + ext._needs_stub = ns filename = ext._file_name = self.get_ext_filename(fullname) - libdir = os.path.dirname(os.path.join(self.build_lib,filename)) + libdir = os.path.dirname(os.path.join(self.build_lib, filename)) if ltd and libdir not in ext.library_dirs: ext.library_dirs.append(libdir) if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs: @@ -140,7 +138,8 @@ class build_ext(_build_ext): tmp = _CONFIG_VARS.copy() try: # XXX Help! I don't have any idea whether these are right... - _CONFIG_VARS['LDSHARED'] = "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup" + _CONFIG_VARS['LDSHARED'] = ( + "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup") _CONFIG_VARS['CCSHARED'] = " -dynamiclib" _CONFIG_VARS['SO'] = ".dylib" customize_compiler(compiler) @@ -154,7 +153,7 @@ class build_ext(_build_ext): compiler.set_include_dirs(self.include_dirs) if self.define is not None: # 'define' option is a list of (name,value) tuples - for (name,value) in self.define: + for (name, value) in self.define: compiler.define_macro(name, value) if self.undef is not None: for macro in self.undef: @@ -171,23 +170,20 @@ class build_ext(_build_ext): # hack so distutils' build_extension() builds a library instead compiler.link_shared_object = link_shared_object.__get__(compiler) - - def get_export_symbols(self, ext): - if isinstance(ext,Library): + if isinstance(ext, Library): return ext.export_symbols - return _build_ext.get_export_symbols(self,ext) + return _build_ext.get_export_symbols(self, ext) def build_extension(self, ext): _compiler = self.compiler try: - if isinstance(ext,Library): + if isinstance(ext, Library): self.compiler = self.shlib_compiler - _build_ext.build_extension(self,ext) + _build_ext.build_extension(self, ext) if ext._needs_stub: - self.write_stub( - self.get_finalized_command('build_py').build_lib, ext - ) + cmd = self.get_finalized_command('build_py').build_lib + self.write_stub(cmd, ext) finally: self.compiler = _compiler @@ -197,54 +193,66 @@ class build_ext(_build_ext): # XXX as dynamic, and not just using a locally-found version or a # XXX static-compiled version libnames = dict.fromkeys([lib._full_name for lib in self.shlibs]) - pkg = '.'.join(ext._full_name.split('.')[:-1]+['']) - for libname in ext.libraries: - if pkg+libname in libnames: return True - return False + pkg = '.'.join(ext._full_name.split('.')[:-1] + ['']) + return any(pkg + libname in libnames for libname in ext.libraries) def get_outputs(self): - outputs = _build_ext.get_outputs(self) - optimize = self.get_finalized_command('build_py').optimize - for ext in self.extensions: - if ext._needs_stub: - base = os.path.join(self.build_lib, 
*ext._full_name.split('.')) - outputs.append(base+'.py') - outputs.append(base+'.pyc') - if optimize: - outputs.append(base+'.pyo') - return outputs + return _build_ext.get_outputs(self) + self.__get_stubs_outputs() + + def __get_stubs_outputs(self): + # assemble the base name for each extension that needs a stub + ns_ext_bases = ( + os.path.join(self.build_lib, *ext._full_name.split('.')) + for ext in self.extensions + if ext._needs_stub + ) + # pair each base with the extension + pairs = itertools.product(ns_ext_bases, self.__get_output_extensions()) + return list(base + fnext for base, fnext in pairs) + + def __get_output_extensions(self): + yield '.py' + yield '.pyc' + if self.get_finalized_command('build_py').optimize: + yield '.pyo' def write_stub(self, output_dir, ext, compile=False): - log.info("writing stub loader for %s to %s",ext._full_name, output_dir) - stub_file = os.path.join(output_dir, *ext._full_name.split('.'))+'.py' + log.info("writing stub loader for %s to %s", ext._full_name, + output_dir) + stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) + + '.py') if compile and os.path.exists(stub_file): - raise DistutilsError(stub_file+" already exists! Please delete.") + raise DistutilsError(stub_file + " already exists! Please delete.") if not self.dry_run: - f = open(stub_file,'w') - f.write('\n'.join([ - "def __bootstrap__():", - " global __bootstrap__, __file__, __loader__", - " import sys, os, pkg_resources, imp"+if_dl(", dl"), - " __file__ = pkg_resources.resource_filename(__name__,%r)" - % os.path.basename(ext._file_name), - " del __bootstrap__", - " if '__loader__' in globals():", - " del __loader__", - if_dl(" old_flags = sys.getdlopenflags()"), - " old_dir = os.getcwd()", - " try:", - " os.chdir(os.path.dirname(__file__))", - if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"), - " imp.load_dynamic(__name__,__file__)", - " finally:", - if_dl(" sys.setdlopenflags(old_flags)"), - " os.chdir(old_dir)", - "__bootstrap__()", - "" # terminal \n - ])) + f = open(stub_file, 'w') + f.write( + '\n'.join([ + "def __bootstrap__():", + " global __bootstrap__, __file__, __loader__", + " import sys, os, pkg_resources, imp" + if_dl(", dl"), + " __file__ = pkg_resources.resource_filename" + "(__name__,%r)" + % os.path.basename(ext._file_name), + " del __bootstrap__", + " if '__loader__' in globals():", + " del __loader__", + if_dl(" old_flags = sys.getdlopenflags()"), + " old_dir = os.getcwd()", + " try:", + " os.chdir(os.path.dirname(__file__))", + if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"), + " imp.load_dynamic(__name__,__file__)", + " finally:", + if_dl(" sys.setdlopenflags(old_flags)"), + " os.chdir(old_dir)", + "__bootstrap__()", + "" # terminal \n + ]) + ) f.close() if compile: from distutils.util import byte_compile + byte_compile([stub_file], optimize=0, force=True, dry_run=self.dry_run) optimize = self.get_finalized_command('install_lib').optimize @@ -255,14 +263,15 @@ class build_ext(_build_ext): os.unlink(stub_file) -if use_stubs or os.name=='nt': +if use_stubs or os.name == 'nt': # Build shared libraries # - def link_shared_object(self, objects, output_libname, output_dir=None, - libraries=None, library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=0, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None - ): self.link( + def link_shared_object( + self, objects, output_libname, output_dir=None, libraries=None, + library_dirs=None, runtime_library_dirs=None, export_symbols=None, + debug=0, extra_preargs=None, 
extra_postargs=None, build_temp=None, + target_lang=None): + self.link( self.SHARED_LIBRARY, objects, output_libname, output_dir, libraries, library_dirs, runtime_library_dirs, export_symbols, debug, extra_preargs, extra_postargs, @@ -272,19 +281,19 @@ else: # Build static libraries everywhere else libtype = 'static' - def link_shared_object(self, objects, output_libname, output_dir=None, - libraries=None, library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=0, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None - ): + def link_shared_object( + self, objects, output_libname, output_dir=None, libraries=None, + library_dirs=None, runtime_library_dirs=None, export_symbols=None, + debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, + target_lang=None): # XXX we need to either disallow these attrs on Library instances, - # or warn/abort here if set, or something... - #libraries=None, library_dirs=None, runtime_library_dirs=None, - #export_symbols=None, extra_preargs=None, extra_postargs=None, - #build_temp=None + # or warn/abort here if set, or something... + # libraries=None, library_dirs=None, runtime_library_dirs=None, + # export_symbols=None, extra_preargs=None, extra_postargs=None, + # build_temp=None - assert output_dir is None # distutils build_ext doesn't pass this - output_dir,filename = os.path.split(output_libname) + assert output_dir is None # distutils build_ext doesn't pass this + output_dir, filename = os.path.split(output_libname) basename, ext = os.path.splitext(filename) if self.library_filename("x").startswith('lib'): # strip 'lib' prefix; this is kludgy if some platform uses @@ -294,5 +303,3 @@ else: self.create_static_lib( objects, basename, output_dir, debug, target_lang ) - - diff --git a/awx/lib/site-packages/setuptools/command/build_py.py b/awx/lib/site-packages/setuptools/command/build_py.py index 090b44d265..98080694ad 100644 --- a/awx/lib/site-packages/setuptools/command/build_py.py +++ b/awx/lib/site-packages/setuptools/command/build_py.py @@ -1,10 +1,10 @@ +from glob import glob +from distutils.util import convert_path +import distutils.command.build_py as orig import os import sys import fnmatch import textwrap -from distutils.command.build_py import build_py as _build_py -from distutils.util import convert_path -from glob import glob try: from setuptools.lib2to3_ex import Mixin2to3 @@ -13,7 +13,8 @@ except ImportError: def run_2to3(self, files, doctests=True): "do nothing" -class build_py(_build_py, Mixin2to3): + +class build_py(orig.build_py, Mixin2to3): """Enhanced 'build_py' command that includes data files with packages The data files are specified via a 'package_data' argument to 'setup()'. @@ -22,11 +23,14 @@ class build_py(_build_py, Mixin2to3): Also, this version of the 'build_py' command allows you to specify both 'py_modules' and 'packages' in the same setup operation. 
""" + def finalize_options(self): - _build_py.finalize_options(self) + orig.build_py.finalize_options(self) self.package_data = self.distribution.package_data - self.exclude_package_data = self.distribution.exclude_package_data or {} - if 'data_files' in self.__dict__: del self.__dict__['data_files'] + self.exclude_package_data = (self.distribution.exclude_package_data or + {}) + if 'data_files' in self.__dict__: + del self.__dict__['data_files'] self.__updated_files = [] self.__doctests_2to3 = [] @@ -48,16 +52,17 @@ class build_py(_build_py, Mixin2to3): # Only compile actual .py files, using our base class' idea of what our # output files are. - self.byte_compile(_build_py.get_outputs(self, include_bytecode=0)) + self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0)) def __getattr__(self, attr): - if attr=='data_files': # lazily compute data files + if attr == 'data_files': # lazily compute data files self.data_files = files = self._get_data_files() return files - return _build_py.__getattr__(self,attr) + return orig.build_py.__getattr__(self, attr) def build_module(self, module, module_file, package): - outfile, copied = _build_py.build_module(self, module, module_file, package) + outfile, copied = orig.build_py.build_module(self, module, module_file, + package) if copied: self.__updated_files.append(outfile) return outfile, copied @@ -74,12 +79,12 @@ class build_py(_build_py, Mixin2to3): build_dir = os.path.join(*([self.build_lib] + package.split('.'))) # Length of path to strip from found files - plen = len(src_dir)+1 + plen = len(src_dir) + 1 # Strip directory from globbed filenames filenames = [ file[plen:] for file in self.find_data_files(package, src_dir) - ] + ] data.append((package, src_dir, build_dir, filenames)) return data @@ -102,7 +107,8 @@ class build_py(_build_py, Mixin2to3): srcfile = os.path.join(src_dir, filename) outf, copied = self.copy_file(srcfile, target) srcfile = os.path.abspath(srcfile) - if copied and srcfile in self.distribution.convert_2to3_doctests: + if (copied and + srcfile in self.distribution.convert_2to3_doctests): self.__doctests_2to3.append(outf) def analyze_manifest(self): @@ -117,21 +123,22 @@ class build_py(_build_py, Mixin2to3): self.run_command('egg_info') ei_cmd = self.get_finalized_command('egg_info') for path in ei_cmd.filelist.files: - d,f = os.path.split(assert_relative(path)) + d, f = os.path.split(assert_relative(path)) prev = None oldf = f - while d and d!=prev and d not in src_dirs: + while d and d != prev and d not in src_dirs: prev = d d, df = os.path.split(d) f = os.path.join(df, f) if d in src_dirs: - if path.endswith('.py') and f==oldf: - continue # it's a module, not data - mf.setdefault(src_dirs[d],[]).append(path) + if path.endswith('.py') and f == oldf: + continue # it's a module, not data + mf.setdefault(src_dirs[d], []).append(path) - def get_data_files(self): pass # kludge 2.4 for lazy computation + def get_data_files(self): + pass # kludge 2.4 for lazy computation - if sys.version<"2.4": # Python 2.4 already has this code + if sys.version < "2.4": # Python 2.4 already has this code def get_outputs(self, include_bytecode=1): """Return complete list of files copied to the build directory @@ -140,11 +147,11 @@ class build_py(_build_py, Mixin2to3): needed for the 'install_lib' command to do its job properly, and to generate a correct installation manifest.) 
""" - return _build_py.get_outputs(self, include_bytecode) + [ + return orig.build_py.get_outputs(self, include_bytecode) + [ os.path.join(build_dir, filename) - for package, src_dir, build_dir,filenames in self.data_files + for package, src_dir, build_dir, filenames in self.data_files for filename in filenames - ] + ] def check_package(self, package, package_dir): """Check namespace packages' __init__ for declare_namespace""" @@ -153,36 +160,37 @@ class build_py(_build_py, Mixin2to3): except KeyError: pass - init_py = _build_py.check_package(self, package, package_dir) + init_py = orig.build_py.check_package(self, package, package_dir) self.packages_checked[package] = init_py if not init_py or not self.distribution.namespace_packages: return init_py for pkg in self.distribution.namespace_packages: - if pkg==package or pkg.startswith(package+'.'): + if pkg == package or pkg.startswith(package + '.'): break else: return init_py - f = open(init_py,'rbU') + f = open(init_py, 'rbU') if 'declare_namespace'.encode() not in f.read(): - from distutils import log - log.warn( - "WARNING: %s is a namespace package, but its __init__.py does\n" - "not declare_namespace(); setuptools 0.7 will REQUIRE this!\n" - '(See the setuptools manual under "Namespace Packages" for ' - "details.)\n", package + from distutils.errors import DistutilsError + + raise DistutilsError( + "Namespace package problem: %s is a namespace package, but " + "its\n__init__.py does not call declare_namespace()! Please " + 'fix it.\n(See the setuptools manual under ' + '"Namespace Packages" for details.)\n"' % (package,) ) f.close() return init_py def initialize_options(self): - self.packages_checked={} - _build_py.initialize_options(self) + self.packages_checked = {} + orig.build_py.initialize_options(self) def get_package_dir(self, package): - res = _build_py.get_package_dir(self, package) + res = orig.build_py.get_package_dir(self, package) if self.distribution.src_root is not None: return os.path.join(self.distribution.src_root, res) return res @@ -202,7 +210,7 @@ class build_py(_build_py, Mixin2to3): seen = {} return [ f for f in files if f not in bad - and f not in seen and seen.setdefault(f,1) # ditch dupes + and f not in seen and seen.setdefault(f, 1) # ditch dupes ] @@ -210,6 +218,7 @@ def assert_relative(path): if not os.path.isabs(path): return path from distutils.errors import DistutilsSetupError + msg = textwrap.dedent(""" Error: setup script specifies an absolute path: diff --git a/awx/lib/site-packages/setuptools/command/develop.py b/awx/lib/site-packages/setuptools/command/develop.py index 1d500040d0..368b64fed7 100644 --- a/awx/lib/site-packages/setuptools/command/develop.py +++ b/awx/lib/site-packages/setuptools/command/develop.py @@ -1,9 +1,14 @@ -from setuptools.command.easy_install import easy_install -from distutils.util import convert_path, subst_vars -from pkg_resources import Distribution, PathMetadata, normalize_path +from distutils.util import convert_path from distutils import log from distutils.errors import DistutilsError, DistutilsOptionError -import os, sys, setuptools, glob +import os +import glob + +from pkg_resources import Distribution, PathMetadata, normalize_path +from setuptools.command.easy_install import easy_install +from setuptools.compat import PY3 +import setuptools + class develop(easy_install): """Set up package for development""" @@ -32,59 +37,56 @@ class develop(easy_install): self.egg_path = None easy_install.initialize_options(self) self.setup_path = None - self.always_copy_from = 
'.' # always copy eggs installed in curdir - - + self.always_copy_from = '.' # always copy eggs installed in curdir def finalize_options(self): ei = self.get_finalized_command("egg_info") if ei.broken_egg_info: - raise DistutilsError( - "Please rename %r to %r before using 'develop'" - % (ei.egg_info, ei.broken_egg_info) - ) + template = "Please rename %r to %r before using 'develop'" + args = ei.egg_info, ei.broken_egg_info + raise DistutilsError(template % args) self.args = [ei.egg_name] - - - easy_install.finalize_options(self) self.expand_basedirs() self.expand_dirs() # pick up setup-dir .egg files only: no .egg-info self.package_index.scan(glob.glob('*.egg')) - self.egg_link = os.path.join(self.install_dir, ei.egg_name+'.egg-link') + self.egg_link = os.path.join(self.install_dir, ei.egg_name + + '.egg-link') self.egg_base = ei.egg_base if self.egg_path is None: self.egg_path = os.path.abspath(ei.egg_base) target = normalize_path(self.egg_base) - if normalize_path(os.path.join(self.install_dir, self.egg_path)) != target: + egg_path = normalize_path(os.path.join(self.install_dir, + self.egg_path)) + if egg_path != target: raise DistutilsOptionError( "--egg-path must be a relative path from the install" - " directory to "+target - ) + " directory to " + target + ) # Make a distribution for the package's source self.dist = Distribution( target, PathMetadata(target, os.path.abspath(ei.egg_info)), - project_name = ei.egg_name + project_name=ei.egg_name ) - p = self.egg_base.replace(os.sep,'/') - if p!= os.curdir: - p = '../' * (p.count('/')+1) + p = self.egg_base.replace(os.sep, '/') + if p != os.curdir: + p = '../' * (p.count('/') + 1) self.setup_path = p p = normalize_path(os.path.join(self.install_dir, self.egg_path, p)) - if p != normalize_path(os.curdir): + if p != normalize_path(os.curdir): raise DistutilsOptionError( "Can't get a consistent path to setup script from" " installation directory", p, normalize_path(os.curdir)) def install_for_development(self): - if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False): + if PY3 and getattr(self.distribution, 'use_2to3', False): # If we run 2to3 we can not do this inplace: # Ensure metadata is up-to-date @@ -99,12 +101,13 @@ class develop(easy_install): self.reinitialize_command('build_ext', inplace=0) self.run_command('build_ext') - + # Fixup egg-link and easy-install.pth ei_cmd = self.get_finalized_command("egg_info") self.egg_path = build_path self.dist.location = build_path - self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info) # XXX + # XXX + self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info) else: # Without 2to3 inplace works fine: self.run_command('egg_info') @@ -112,7 +115,7 @@ class develop(easy_install): # Build extensions in-place self.reinitialize_command('build_ext', inplace=1) self.run_command('build_ext') - + self.install_site_py() # ensure that target dir is site-safe if setuptools.bootstrap_install_from: self.easy_install(setuptools.bootstrap_install_from) @@ -121,21 +124,21 @@ class develop(easy_install): # create an .egg-link in the installation dir, pointing to our egg log.info("Creating %s (link to %s)", self.egg_link, self.egg_base) if not self.dry_run: - f = open(self.egg_link,"w") + f = open(self.egg_link, "w") f.write(self.egg_path + "\n" + self.setup_path) f.close() # postprocess the installed distro, fixing up .pth, installing scripts, # and handling requirements self.process_distribution(None, self.dist, not self.no_deps) - def uninstall_link(self): if 
os.path.exists(self.egg_link): log.info("Removing %s (link to %s)", self.egg_link, self.egg_base) egg_link_file = open(self.egg_link) contents = [line.rstrip() for line in egg_link_file] egg_link_file.close() - if contents not in ([self.egg_path], [self.egg_path, self.setup_path]): + if contents not in ([self.egg_path], + [self.egg_path, self.setup_path]): log.warn("Link points to %s: uninstall aborted", contents) return if not self.dry_run: @@ -149,7 +152,7 @@ class develop(easy_install): def install_egg_scripts(self, dist): if dist is not self.dist: # Installing a dependency, so fall back to normal behavior - return easy_install.install_egg_scripts(self,dist) + return easy_install.install_egg_scripts(self, dist) # create wrapper scripts in the script dir, pointing to dist.scripts @@ -160,8 +163,7 @@ class develop(easy_install): for script_name in self.distribution.scripts or []: script_path = os.path.abspath(convert_path(script_name)) script_name = os.path.basename(script_path) - f = open(script_path,'rU') + f = open(script_path, 'rU') script_text = f.read() f.close() self.install_script(dist, script_name, script_text, script_path) - diff --git a/awx/lib/site-packages/setuptools/command/easy_install.py b/awx/lib/site-packages/setuptools/command/easy_install.py index 08ebf3e589..e057b508e9 100644 --- a/awx/lib/site-packages/setuptools/command/easy_install.py +++ b/awx/lib/site-packages/setuptools/command/easy_install.py @@ -12,6 +12,14 @@ __ https://pythonhosted.org/setuptools/easy_install.html """ +from glob import glob +from distutils.util import get_platform +from distutils.util import convert_path, subst_vars +from distutils.errors import DistutilsArgError, DistutilsOptionError, \ + DistutilsError, DistutilsPlatformError +from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS +from distutils import log, dir_util +from distutils.command.build_scripts import first_line_re import sys import os import zipimport @@ -26,44 +34,43 @@ import textwrap import warnings import site import struct -from glob import glob -from distutils import log, dir_util +import contextlib +import subprocess +import shlex +import io -import pkg_resources -from setuptools import Command, _dont_write_bytecode +from setuptools import Command from setuptools.sandbox import run_setup from setuptools.py31compat import get_path, get_config_vars - -from distutils.util import get_platform -from distutils.util import convert_path, subst_vars -from distutils.errors import DistutilsArgError, DistutilsOptionError, \ - DistutilsError, DistutilsPlatformError -from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS from setuptools.command import setopt from setuptools.archive_util import unpack_archive from setuptools.package_index import PackageIndex from setuptools.package_index import URL_SCHEME from setuptools.command import bdist_egg, egg_info from setuptools.compat import (iteritems, maxsize, basestring, unicode, - reraise) + reraise, PY2, PY3) from pkg_resources import ( yield_lines, normalize_path, resource_string, ensure_directory, get_distribution, find_distributions, Environment, Requirement, Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound, VersionConflict, DEVELOP_DIST, ) +import pkg_resources + +# Turn on PEP440Warnings +warnings.filterwarnings("default", category=pkg_resources.PEP440Warning) -sys_executable = os.environ.get('__VENV_LAUNCHER__', - os.path.normpath(sys.executable)) __all__ = [ 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg', 
    'main', 'get_exe_prefixes',
 ]
 
+
 def is_64bit():
     return struct.calcsize("P") == 8
 
+
 def samefile(p1, p2):
     both_exist = os.path.exists(p1) and os.path.exists(p2)
     use_samefile = hasattr(os.path, 'samefile') and both_exist
@@ -73,9 +80,11 @@ def samefile(p1, p2):
     norm_p1 = os.path.normpath(os.path.normcase(p1))
     norm_p2 = os.path.normpath(os.path.normcase(p2))
     return norm_p1 == norm_p2
 
-if sys.version_info <= (3,):
+
+if PY2:
     def _to_ascii(s):
         return s
+
     def isascii(s):
         try:
             unicode(s, 'ascii')
@@ -85,6 +94,7 @@ if sys.version_info <= (3,):
 else:
     def _to_ascii(s):
         return s.encode('ascii')
+
     def isascii(s):
         try:
             s.encode('ascii')
@@ -92,6 +102,7 @@ else:
         except UnicodeError:
             return False
 
+
 class easy_install(Command):
     """Manage a download/build/install process"""
     description = "Find/get/install Python packages"
@@ -109,22 +120,22 @@ class easy_install(Command):
         ("index-url=", "i", "base URL of Python Package Index"),
         ("find-links=", "f", "additional URL(s) to search for packages"),
         ("build-directory=", "b",
-            "download/extract/build in DIR; keep the results"),
+         "download/extract/build in DIR; keep the results"),
         ('optimize=', 'O',
-            "also compile with optimization: -O1 for \"python -O\", "
-            "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
+         "also compile with optimization: -O1 for \"python -O\", "
+         "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
         ('record=', None,
-            "filename in which to record list of installed files"),
+         "filename in which to record list of installed files"),
         ('always-unzip', 'Z', "don't install as a zipfile, no matter what"),
-        ('site-dirs=','S',"list of directories where .pth files work"),
+        ('site-dirs=', 'S', "list of directories where .pth files work"),
         ('editable', 'e', "Install specified packages in editable form"),
         ('no-deps', 'N', "don't install dependencies"),
         ('allow-hosts=', 'H', "pattern(s) that hostnames must match"),
         ('local-snapshots-ok', 'l',
-            "allow building eggs from local checkouts"),
+         "allow building eggs from local checkouts"),
         ('version', None, "print version information and exit"),
         ('no-find-links', None,
-            "Don't load find-links defined in packages being installed")
+         "Don't load find-links defined in packages being installed")
     ]
     boolean_options = [
         'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy',
@@ -158,10 +169,10 @@ class easy_install(Command):
         self.editable = self.no_deps = self.allow_hosts = None
         self.root = self.prefix = self.no_report = None
         self.version = None
-        self.install_purelib = None     # for pure module distributions
-        self.install_platlib = None     # non-pure (dists w/ extensions)
-        self.install_headers = None     # for C/C++ headers
-        self.install_lib = None         # set to either purelib or platlib
+        self.install_purelib = None  # for pure module distributions
+        self.install_platlib = None  # non-pure (dists w/ extensions)
+        self.install_headers = None  # for C/C++ headers
+        self.install_lib = None  # set to either purelib or platlib
         self.install_scripts = None
         self.install_data = None
         self.install_base = None
@@ -196,7 +207,8 @@ class easy_install(Command):
             if os.path.exists(filename) or os.path.islink(filename):
                 log.info("Deleting %s", filename)
                 if not self.dry_run:
-                    if os.path.isdir(filename) and not os.path.islink(filename):
+                    if (os.path.isdir(filename) and
+                            not os.path.islink(filename)):
                         rmtree(filename)
                     else:
                         os.unlink(filename)
@@ -229,7 +241,7 @@ class easy_install(Command):
             self.config_vars['usersite'] = self.install_usersite
 
         # fix the install_dir if "--user" was used
-        #XXX: duplicate of the code in the setup command
+        # XXX: duplicate of the code in the setup command
         if self.user and site.ENABLE_USER_SITE:
             self.create_home_path()
             if self.install_userbase is None:
@@ -244,7 +256,8 @@ class easy_install(Command):
 
         self.expand_basedirs()
         self.expand_dirs()
 
-        self._expand('install_dir','script_dir','build_directory','site_dirs')
+        self._expand('install_dir', 'script_dir', 'build_directory',
+                     'site_dirs')
         # If a non-default installation directory was specified, default the
         # script directory to match it.
         if self.script_dir is None:
@@ -256,12 +269,12 @@ class easy_install(Command):
             # Let install_dir get set by install_lib command, which in turn
             # gets its info from the install command, and takes into account
             # --prefix and --home and all that other crud.
-            self.set_undefined_options('install_lib',
-                ('install_dir','install_dir')
+            self.set_undefined_options(
+                'install_lib', ('install_dir', 'install_dir')
             )
         # Likewise, set default script_dir from 'install_scripts.install_dir'
-        self.set_undefined_options('install_scripts',
-            ('install_dir', 'script_dir')
+        self.set_undefined_options(
+            'install_scripts', ('install_dir', 'script_dir')
         )
 
         if self.user and self.install_purelib:
@@ -275,18 +288,20 @@ class easy_install(Command):
         self.all_site_dirs = get_site_dirs()
         if self.site_dirs is not None:
             site_dirs = [
-                os.path.expanduser(s.strip()) for s in self.site_dirs.split(',')
+                os.path.expanduser(s.strip()) for s in
+                self.site_dirs.split(',')
             ]
             for d in site_dirs:
                 if not os.path.isdir(d):
                     log.warn("%s (in --site-dirs) does not exist", d)
                 elif normalize_path(d) not in normpath:
                     raise DistutilsOptionError(
-                        d+" (in --site-dirs) is not on sys.path"
+                        d + " (in --site-dirs) is not on sys.path"
                     )
                 else:
                     self.all_site_dirs.append(normalize_path(d))
-        if not self.editable: self.check_site_dir()
+        if not self.editable:
+            self.check_site_dir()
         self.index_url = self.index_url or "https://pypi.python.org/simple"
         self.shadow_path = self.all_site_dirs[:]
         for path_item in self.install_dir, normalize_path(self.script_dir):
@@ -299,9 +314,9 @@ class easy_install(Command):
             hosts = ['*']
         if self.package_index is None:
             self.package_index = self.create_index(
-                self.index_url, search_path = self.shadow_path, hosts=hosts,
+                self.index_url, search_path=self.shadow_path, hosts=hosts,
             )
-        self.local_index = Environment(self.shadow_path+sys.path)
+        self.local_index = Environment(self.shadow_path + sys.path)
 
         if self.find_links is not None:
             if isinstance(self.find_links, basestring):
@@ -309,14 +324,15 @@ class easy_install(Command):
             else:
                 self.find_links = []
         if self.local_snapshots_ok:
-            self.package_index.scan_egg_links(self.shadow_path+sys.path)
+            self.package_index.scan_egg_links(self.shadow_path + sys.path)
         if not self.no_find_links:
             self.package_index.add_find_links(self.find_links)
-        self.set_undefined_options('install_lib', ('optimize','optimize'))
-        if not isinstance(self.optimize,int):
+        self.set_undefined_options('install_lib', ('optimize', 'optimize'))
+        if not isinstance(self.optimize, int):
             try:
                 self.optimize = int(self.optimize)
-                if not (0 <= self.optimize <= 2): raise ValueError
+                if not (0 <= self.optimize <= 2):
+                    raise ValueError
             except ValueError:
                 raise DistutilsOptionError("--optimize must be 0, 1, or 2")
 
@@ -348,7 +364,7 @@ class easy_install(Command):
         """Calls `os.path.expanduser` on install dirs."""
         self._expand_attrs(['install_purelib', 'install_platlib',
                             'install_lib', 'install_headers',
-                            'install_scripts', 'install_data',])
+                            'install_scripts', 'install_data', ])
 
     def run(self):
         if self.verbose != self.distribution.verbose:
@@ -358,11 +374,12 @@ class easy_install(Command):
                 self.easy_install(spec, not self.no_deps)
             if self.record:
                 outputs = self.outputs
-                if self.root:               # strip any package prefix
+                if self.root:  # strip any package prefix
                     root_len = len(self.root)
                     for counter in range(len(outputs)):
                         outputs[counter] = outputs[counter][root_len:]
                 from distutils import file_util
+
                 self.execute(
                     file_util.write_file, (self.record, outputs),
                     "writing list of installed files to '%s'" %
@@ -390,7 +407,7 @@ class easy_install(Command):
         """Verify that self.install_dir is .pth-capable dir, if needed"""
 
         instdir = normalize_path(self.install_dir)
-        pth_file = os.path.join(instdir,'easy-install.pth')
+        pth_file = os.path.join(instdir, 'easy-install.pth')
 
         # Is it a configured, PYTHONPATH, implicit, or explicit site dir?
         is_site_dir = instdir in self.all_site_dirs
@@ -400,13 +417,14 @@ class easy_install(Command):
             is_site_dir = self.check_pth_processing()
         else:
             # make sure we can write to target dir
-            testfile = self.pseudo_tempname()+'.write-test'
+            testfile = self.pseudo_tempname() + '.write-test'
             test_exists = os.path.exists(testfile)
             try:
-                if test_exists: os.unlink(testfile)
-                open(testfile,'w').close()
+                if test_exists:
+                    os.unlink(testfile)
+                open(testfile, 'w').close()
                 os.unlink(testfile)
-            except (OSError,IOError):
+            except (OSError, IOError):
                 self.cant_write_to_target()
 
         if not is_site_dir and not self.multi_version:
@@ -419,84 +437,94 @@ class easy_install(Command):
         else:
             self.pth_file = None
 
-        PYTHONPATH = os.environ.get('PYTHONPATH','').split(os.pathsep)
+        PYTHONPATH = os.environ.get('PYTHONPATH', '').split(os.pathsep)
         if instdir not in map(normalize_path, [_f for _f in PYTHONPATH if _f]):
             # only PYTHONPATH dirs need a site.py, so pretend it's there
             self.sitepy_installed = True
         elif self.multi_version and not os.path.exists(pth_file):
-            self.sitepy_installed = True        # don't need site.py in this case
-            self.pth_file = None                # and don't create a .pth file
+            self.sitepy_installed = True  # don't need site.py in this case
+            self.pth_file = None  # and don't create a .pth file
         self.install_dir = instdir
 
+    __cant_write_msg = textwrap.dedent("""
+        can't create or remove files in install directory
+
+        The following error occurred while trying to add or remove files in the
+        installation directory:
+
+            %s
+
+        The installation directory you specified (via --install-dir, --prefix, or
+        the distutils default setting) was:
+
+            %s
+        """).lstrip()
+
+    __not_exists_id = textwrap.dedent("""
+        This directory does not currently exist. Please create it and try again, or
+        choose a different installation directory (using the -d or --install-dir
+        option).
+        """).lstrip()
+
+    __access_msg = textwrap.dedent("""
+        Perhaps your account does not have write access to this directory? If the
+        installation directory is a system-owned directory, you may need to sign in
+        as the administrator or "root" account. If you do not have administrative
+        access to this machine, you may wish to choose a different installation
+        directory, preferably one that is listed in your PYTHONPATH environment
+        variable.
+
+        For information on other options, you may wish to consult the
+        documentation at:
+
+          https://pythonhosted.org/setuptools/easy_install.html
+
+        Please make the appropriate changes for your system and try again.
+ """).lstrip() + def cant_write_to_target(self): - template = """can't create or remove files in install directory - -The following error occurred while trying to add or remove files in the -installation directory: - - %s - -The installation directory you specified (via --install-dir, --prefix, or -the distutils default setting) was: - - %s -""" - msg = template % (sys.exc_info()[1], self.install_dir,) + msg = self.__cant_write_msg % (sys.exc_info()[1], self.install_dir,) if not os.path.exists(self.install_dir): - msg += """ -This directory does not currently exist. Please create it and try again, or -choose a different installation directory (using the -d or --install-dir -option). -""" + msg += '\n' + self.__not_exists_id else: - msg += """ -Perhaps your account does not have write access to this directory? If the -installation directory is a system-owned directory, you may need to sign in -as the administrator or "root" account. If you do not have administrative -access to this machine, you may wish to choose a different installation -directory, preferably one that is listed in your PYTHONPATH environment -variable. - -For information on other options, you may wish to consult the -documentation at: - - https://pythonhosted.org/setuptools/easy_install.html - -Please make the appropriate changes for your system and try again. -""" + msg += '\n' + self.__access_msg raise DistutilsError(msg) def check_pth_processing(self): """Empirically verify whether .pth files are supported in inst. dir""" instdir = self.install_dir log.info("Checking .pth file support in %s", instdir) - pth_file = self.pseudo_tempname()+".pth" - ok_file = pth_file+'.ok' + pth_file = self.pseudo_tempname() + ".pth" + ok_file = pth_file + '.ok' ok_exists = os.path.exists(ok_file) try: - if ok_exists: os.unlink(ok_file) + if ok_exists: + os.unlink(ok_file) dirname = os.path.dirname(ok_file) if not os.path.exists(dirname): os.makedirs(dirname) - f = open(pth_file,'w') - except (OSError,IOError): + f = open(pth_file, 'w') + except (OSError, IOError): self.cant_write_to_target() else: try: - f.write("import os; f = open(%r, 'w'); f.write('OK'); f.close()\n" % (ok_file,)) + f.write("import os; f = open(%r, 'w'); f.write('OK'); " + "f.close()\n" % (ok_file,)) f.close() - f=None + f = None executable = sys.executable - if os.name=='nt': - dirname,basename = os.path.split(executable) - alt = os.path.join(dirname,'pythonw.exe') - if basename.lower()=='python.exe' and os.path.exists(alt): + if os.name == 'nt': + dirname, basename = os.path.split(executable) + alt = os.path.join(dirname, 'pythonw.exe') + if (basename.lower() == 'python.exe' and + os.path.exists(alt)): # use pythonw.exe to avoid opening a console window executable = alt from distutils.spawn import spawn - spawn([executable,'-E','-c','pass'],0) + + spawn([executable, '-E', '-c', 'pass'], 0) if os.path.exists(ok_file): log.info( @@ -525,7 +553,7 @@ Please make the appropriate changes for your system and try again. continue self.install_script( dist, script_name, - dist.get_metadata('scripts/'+script_name) + dist.get_metadata('scripts/' + script_name) ) self.install_wrapper_scripts(dist) @@ -533,7 +561,7 @@ Please make the appropriate changes for your system and try again. 
@@ -533,7 +561,7 @@ Please make the appropriate changes for your system and try again.
        if os.path.isdir(path):
            for base, dirs, files in os.walk(path):
                for filename in files:
-                    self.outputs.append(os.path.join(base,filename))
+                    self.outputs.append(os.path.join(base, filename))
        else:
            self.outputs.append(path)
 
@@ -545,7 +573,7 @@ Please make the appropriate changes for your system and try again.
                % (spec,)
            )
 
-    def check_editable(self,spec):
+    def check_editable(self, spec):
        if not self.editable:
            return
 
@@ -558,15 +586,17 @@ Please make the appropriate changes for your system and try again.
    def easy_install(self, spec, deps=False):
        tmpdir = tempfile.mkdtemp(prefix="easy_install-")
        download = None
-        if not self.editable: self.install_site_py()
+        if not self.editable:
+            self.install_site_py()
 
        try:
-            if not isinstance(spec,Requirement):
+            if not isinstance(spec, Requirement):
                if URL_SCHEME(spec):
                    # It's a url, download it to tmpdir and process
                    self.not_editable(spec)
                    download = self.package_index.download(spec, tmpdir)
-                    return self.install_item(None, download, tmpdir, deps, True)
+                    return self.install_item(None, download, tmpdir, deps,
+                                             True)
 
                elif os.path.exists(spec):
                    # Existing file or directory, just process it directly
@@ -577,15 +607,15 @@ Please make the appropriate changes for your system and try again.
            self.check_editable(spec)
            dist = self.package_index.fetch_distribution(
-                spec, tmpdir, self.upgrade, self.editable, not self.always_copy,
-                self.local_index
+                spec, tmpdir, self.upgrade, self.editable,
+                not self.always_copy, self.local_index
            )
            if dist is None:
                msg = "Could not find suitable distribution for %r" % spec
                if self.always_copy:
-                    msg+=" (--always-copy skips system and development eggs)"
+                    msg += " (--always-copy skips system and development eggs)"
                raise DistutilsError(msg)
-            elif dist.precedence==DEVELOP_DIST:
+            elif dist.precedence == DEVELOP_DIST:
                # .egg-info dists don't need installing, just process deps
                self.process_distribution(spec, dist, deps, "Using")
                return dist
@@ -612,10 +642,10 @@ Please make the appropriate changes for your system and try again.
        # at this point, we know it's a local .egg, we just don't know if
        # it's already installed.
        for dist in self.local_index[spec.project_name]:
-            if dist.location==download:
+            if dist.location == download:
                break
        else:
-            install_needed = True   # it's not in the local index
+            install_needed = True  # it's not in the local index
 
        log.info("Processing %s", os.path.basename(download))
 
@@ -644,6 +674,8 @@ Please make the appropriate changes for your system and try again.
    def process_distribution(self, requirement, dist, deps=True, *info):
        self.update_pth(dist)
        self.package_index.add(dist)
+        if dist in self.local_index[dist.key]:
+            self.local_index.remove(dist)
        self.local_index.add(dist)
        self.install_egg_scripts(dist)
        self.installed_projects[dist.key] = dist
@@ -670,17 +702,12 @@ Please make the appropriate changes for your system and try again.
                distros = WorkingSet([]).resolve(
                    [requirement], self.local_index, self.easy_install
                )
-            except DistributionNotFound:
-                e = sys.exc_info()[1]
+            except DistributionNotFound as e:
                raise DistutilsError(
                    "Could not find required distribution %s" % e.args
                )
-            except VersionConflict:
-                e = sys.exc_info()[1]
-                raise DistutilsError(
-                    "Installed distribution %s conflicts with requirement %s"
-                    % e.args
-                )
+            except VersionConflict as e:
+                raise DistutilsError(e.report())
        if self.always_copy or self.always_copy_from:
            # Force all the relevant distros to be copied or activated
            for dist in distros:
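For context: the resolution step above delegates to pkg_resources, and the hunk narrows the two failure modes to DistributionNotFound and VersionConflict (the latter now using the e.report() helper). A minimal sketch of the same call outside the command (the requirement string is an arbitrary example):

```python
# Minimal sketch of dependency resolution via pkg_resources, mirroring the
# process_distribution() error handling above.
import pkg_resources
from pkg_resources import Environment, Requirement, WorkingSet

requirement = Requirement.parse('setuptools>=12')  # arbitrary example
env = Environment()  # scans sys.path for installed distributions

try:
    # resolve() returns every distribution needed to satisfy the
    # requirement, raising on the two cases handled in the diff.
    distros = WorkingSet([]).resolve([requirement], env)
    for dist in distros:
        print(dist.project_name, dist.version)
except pkg_resources.DistributionNotFound as e:
    print('not found:', e)
except pkg_resources.VersionConflict as e:
    print('conflict:', e.report())
```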
def maybe_move(self, spec, dist_filename, setup_base): dst = os.path.join(self.build_directory, spec.key) if os.path.exists(dst): - msg = "%r already exists in %s; build directory %s will not be kept" + msg = ("%r already exists in %s; build directory %s will not be " + "kept") log.warn(msg, spec.key, self.build_directory, setup_base) return setup_base if os.path.isdir(dist_filename): setup_base = dist_filename else: - if os.path.dirname(dist_filename)==setup_base: - os.unlink(dist_filename) # get it out of the tmp dir + if os.path.dirname(dist_filename) == setup_base: + os.unlink(dist_filename) # get it out of the tmp dir contents = os.listdir(setup_base) - if len(contents)==1: - dist_filename = os.path.join(setup_base,contents[0]) + if len(contents) == 1: + dist_filename = os.path.join(setup_base, contents[0]) if os.path.isdir(dist_filename): # if the only thing there is a directory, move it instead setup_base = dist_filename @@ -720,7 +748,7 @@ Please make the appropriate changes for your system and try again. def install_wrapper_scripts(self, dist): if not self.exclude_scripts: - for args in get_script_args(dist): + for args in ScriptWriter.best().get_args(dist): self.write_script(*args) def install_script(self, dist, script_name, script_text, dev_path=None): @@ -728,32 +756,31 @@ Please make the appropriate changes for your system and try again. spec = str(dist.as_requirement()) is_script = is_python_script(script_text, script_name) - def get_template(filename): - """ - There are a couple of template scripts in the package. This - function loads one of them and prepares it for use. - - These templates use triple-quotes to escape variable - substitutions so the scripts get the 2to3 treatment when build - on Python 3. The templates cannot use triple-quotes naturally. - """ - raw_bytes = resource_string('setuptools', template_name) - template_str = raw_bytes.decode('utf-8') - clean_template = template_str.replace('"""', '') - return clean_template - if is_script: - template_name = 'script template.py' - if dev_path: - template_name = template_name.replace('.py', ' (dev).py') - script_text = (get_script_header(script_text) + - get_template(template_name) % locals()) + script_text = (ScriptWriter.get_header(script_text) + + self._load_template(dev_path) % locals()) self.write_script(script_name, _to_ascii(script_text), 'b') + @staticmethod + def _load_template(dev_path): + """ + There are a couple of template scripts in the package. This + function loads one of them and prepares it for use. + """ + # See https://bitbucket.org/pypa/setuptools/issue/134 for info + # on script file naming and downstream issues with SVR4 + name = 'script.tmpl' + if dev_path: + name = name.replace('.tmpl', ' (dev).tmpl') + + raw_bytes = resource_string('setuptools', name) + return raw_bytes.decode('utf-8') + def write_script(self, script_name, contents, mode="t", blockers=()): """Write an executable file to the scripts directory""" - self.delete_blockers( # clean up old .py/.pyw w/o a script - [os.path.join(self.script_dir,x) for x in blockers]) + self.delete_blockers( # clean up old .py/.pyw w/o a script + [os.path.join(self.script_dir, x) for x in blockers] + ) log.info("Installing %s script to %s", script_name, self.script_dir) target = os.path.join(self.script_dir, script_name) self.add_output(target) @@ -763,10 +790,10 @@ Please make the appropriate changes for your system and try again. 
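The `install_wrapper_scripts` change above routes script generation through `ScriptWriter.best().get_args(dist)` instead of the removed `get_script_args` free function. For a hypothetical `demo==1.0` distribution exposing a `demo` console entry point, the emitted stub looks roughly like this (the exact `#!` line comes from `CommandSpec.as_header()` and depends on the local interpreter):

```python
#!/usr/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'demo==1.0','console_scripts','demo'
__requires__ = 'demo==1.0'
import sys
from pkg_resources import load_entry_point

if __name__ == '__main__':
    sys.exit(
        load_entry_point('demo==1.0', 'console_scripts', 'demo')()
    )
```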
ensure_directory(target) if os.path.exists(target): os.unlink(target) - f = open(target,"w"+mode) + f = open(target, "w" + mode) f.write(contents) f.close() - chmod(target, 0x1FF-mask) # 0777 + chmod(target, 0o777 - mask) def install_eggs(self, spec, dist_filename, tmpdir): # .egg dirs or files are already built, so just return them @@ -782,7 +809,7 @@ Please make the appropriate changes for your system and try again. elif os.path.isdir(dist_filename): setup_base = os.path.abspath(dist_filename) - if (setup_base.startswith(tmpdir) # something we downloaded + if (setup_base.startswith(tmpdir) # something we downloaded and self.build_directory and spec is not None): setup_base = self.maybe_move(spec, dist_filename, setup_base) @@ -793,11 +820,13 @@ Please make the appropriate changes for your system and try again. setups = glob(os.path.join(setup_base, '*', 'setup.py')) if not setups: raise DistutilsError( - "Couldn't find a setup script in %s" % os.path.abspath(dist_filename) + "Couldn't find a setup script in %s" % + os.path.abspath(dist_filename) ) - if len(setups)>1: + if len(setups) > 1: raise DistutilsError( - "Multiple setup scripts in %s" % os.path.abspath(dist_filename) + "Multiple setup scripts in %s" % + os.path.abspath(dist_filename) ) setup_script = setups[0] @@ -810,13 +839,15 @@ Please make the appropriate changes for your system and try again. def egg_distribution(self, egg_path): if os.path.isdir(egg_path): - metadata = PathMetadata(egg_path,os.path.join(egg_path,'EGG-INFO')) + metadata = PathMetadata(egg_path, os.path.join(egg_path, + 'EGG-INFO')) else: metadata = EggMetadata(zipimport.zipimporter(egg_path)) - return Distribution.from_filename(egg_path,metadata=metadata) + return Distribution.from_filename(egg_path, metadata=metadata) def install_egg(self, egg_path, tmpdir): - destination = os.path.join(self.install_dir,os.path.basename(egg_path)) + destination = os.path.join(self.install_dir, + os.path.basename(egg_path)) destination = os.path.abspath(destination) if not self.dry_run: ensure_directory(destination) @@ -826,24 +857,33 @@ Please make the appropriate changes for your system and try again. 
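The `chmod(target, 0o777 - mask)` rewrite above replaces the old hex literal `0x1FF - mask`; both compute the same bits, the octal form just reads as the familiar 0777. A quick illustration of the arithmetic, paired with the `current_umask()` helper this same patch adds further down:

```python
import os

def current_umask():
    # Read the umask without changing it: set a throwaway value,
    # capture the previous one, then restore it immediately.
    tmp = os.umask(0o022)
    os.umask(tmp)
    return tmp

mask = current_umask()
print(oct(0o777 - mask))  # the mode installed scripts receive;
                          # 0o755 under the common 022 umask
```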
if os.path.isdir(destination) and not os.path.islink(destination): dir_util.remove_tree(destination, dry_run=self.dry_run) elif os.path.exists(destination): - self.execute(os.unlink,(destination,),"Removing "+destination) - uncache_zipdir(destination) - if os.path.isdir(egg_path): - if egg_path.startswith(tmpdir): - f,m = shutil.move, "Moving" + self.execute(os.unlink, (destination,), "Removing " + + destination) + try: + new_dist_is_zipped = False + if os.path.isdir(egg_path): + if egg_path.startswith(tmpdir): + f, m = shutil.move, "Moving" + else: + f, m = shutil.copytree, "Copying" + elif self.should_unzip(dist): + self.mkpath(destination) + f, m = self.unpack_and_compile, "Extracting" else: - f,m = shutil.copytree, "Copying" - elif self.should_unzip(dist): - self.mkpath(destination) - f,m = self.unpack_and_compile, "Extracting" - elif egg_path.startswith(tmpdir): - f,m = shutil.move, "Moving" - else: - f,m = shutil.copy2, "Copying" - - self.execute(f, (egg_path, destination), - (m+" %s to %s") % - (os.path.basename(egg_path),os.path.dirname(destination))) + new_dist_is_zipped = True + if egg_path.startswith(tmpdir): + f, m = shutil.move, "Moving" + else: + f, m = shutil.copy2, "Copying" + self.execute(f, (egg_path, destination), + (m + " %s to %s") % + (os.path.basename(egg_path), + os.path.dirname(destination))) + update_dist_caches(destination, + fix_zipimporter_caches=new_dist_is_zipped) + except: + update_dist_caches(destination, fix_zipimporter_caches=False) + raise self.add_output(destination) return self.egg_distribution(destination) @@ -858,30 +898,33 @@ Please make the appropriate changes for your system and try again. # Create a dummy distribution object until we build the real distro dist = Distribution( None, - project_name=cfg.get('metadata','name'), - version=cfg.get('metadata','version'), platform=get_platform(), + project_name=cfg.get('metadata', 'name'), + version=cfg.get('metadata', 'version'), platform=get_platform(), ) # Convert the .exe to an unpacked egg - egg_path = dist.location = os.path.join(tmpdir, dist.egg_name()+'.egg') + egg_path = dist.location = os.path.join(tmpdir, dist.egg_name() + + '.egg') egg_tmp = egg_path + '.tmp' _egg_info = os.path.join(egg_tmp, 'EGG-INFO') pkg_inf = os.path.join(_egg_info, 'PKG-INFO') - ensure_directory(pkg_inf) # make sure EGG-INFO dir exists - dist._provider = PathMetadata(egg_tmp, _egg_info) # XXX + ensure_directory(pkg_inf) # make sure EGG-INFO dir exists + dist._provider = PathMetadata(egg_tmp, _egg_info) # XXX self.exe_to_egg(dist_filename, egg_tmp) # Write EGG-INFO/PKG-INFO if not os.path.exists(pkg_inf): - f = open(pkg_inf,'w') + f = open(pkg_inf, 'w') f.write('Metadata-Version: 1.0\n') - for k,v in cfg.items('metadata'): + for k, v in cfg.items('metadata'): if k != 'target_version': - f.write('%s: %s\n' % (k.replace('_','-').title(), v)) + f.write('%s: %s\n' % (k.replace('_', '-').title(), v)) f.close() - script_dir = os.path.join(_egg_info,'scripts') - self.delete_blockers( # delete entry-point scripts to avoid duping - [os.path.join(script_dir,args[0]) for args in get_script_args(dist)] + script_dir = os.path.join(_egg_info, 'scripts') + # delete entry-point scripts to avoid duping + self.delete_blockers( + [os.path.join(script_dir, args[0]) for args in + ScriptWriter.get_args(dist)] ) # Build .egg file from tmpdir bdist_egg.make_zipfile( @@ -897,11 +940,12 @@ Please make the appropriate changes for your system and try again. 
to_compile = [] native_libs = [] top_level = {} - def process(src,dst): + + def process(src, dst): s = src.lower() - for old,new in prefixes: + for old, new in prefixes: if s.startswith(old): - src = new+src[len(old):] + src = new + src[len(old):] parts = src.split('/') dst = os.path.join(egg_tmp, *parts) dl = dst.lower() @@ -909,97 +953,104 @@ Please make the appropriate changes for your system and try again. parts[-1] = bdist_egg.strip_module(parts[-1]) top_level[os.path.splitext(parts[0])[0]] = 1 native_libs.append(src) - elif dl.endswith('.py') and old!='SCRIPTS/': + elif dl.endswith('.py') and old != 'SCRIPTS/': top_level[os.path.splitext(parts[0])[0]] = 1 to_compile.append(dst) return dst if not src.endswith('.pth'): log.warn("WARNING: can't process %s", src) return None + # extract, tracking .pyd/.dll->native_libs and .py -> to_compile unpack_archive(dist_filename, egg_tmp, process) stubs = [] for res in native_libs: - if res.lower().endswith('.pyd'): # create stubs for .pyd's + if res.lower().endswith('.pyd'): # create stubs for .pyd's parts = res.split('/') resource = parts[-1] - parts[-1] = bdist_egg.strip_module(parts[-1])+'.py' + parts[-1] = bdist_egg.strip_module(parts[-1]) + '.py' pyfile = os.path.join(egg_tmp, *parts) to_compile.append(pyfile) stubs.append(pyfile) bdist_egg.write_stub(resource, pyfile) - self.byte_compile(to_compile) # compile .py's - bdist_egg.write_safety_flag(os.path.join(egg_tmp,'EGG-INFO'), + self.byte_compile(to_compile) # compile .py's + bdist_egg.write_safety_flag( + os.path.join(egg_tmp, 'EGG-INFO'), bdist_egg.analyze_egg(egg_tmp, stubs)) # write zip-safety flag - for name in 'top_level','native_libs': + for name in 'top_level', 'native_libs': if locals()[name]: - txt = os.path.join(egg_tmp, 'EGG-INFO', name+'.txt') + txt = os.path.join(egg_tmp, 'EGG-INFO', name + '.txt') if not os.path.exists(txt): - f = open(txt,'w') - f.write('\n'.join(locals()[name])+'\n') + f = open(txt, 'w') + f.write('\n'.join(locals()[name]) + '\n') f.close() + __mv_warning = textwrap.dedent(""" + Because this distribution was installed --multi-version, before you can + import modules from this package in an application, you will need to + 'import pkg_resources' and then use a 'require()' call similar to one of + these examples, in order to select the desired version: + + pkg_resources.require("%(name)s") # latest installed version + pkg_resources.require("%(name)s==%(version)s") # this exact version + pkg_resources.require("%(name)s>=%(version)s") # this version or higher + """).lstrip() + + __id_warning = textwrap.dedent(""" + Note also that the installation directory must be on sys.path at runtime for + this to work. (e.g. by being the application's script directory, by being on + PYTHONPATH, or by being added to sys.path by your code.) 
+ """) + def installation_report(self, req, dist, what="Installed"): """Helpful installation message for display to package users""" msg = "\n%(what)s %(eggloc)s%(extras)s" if self.multi_version and not self.no_report: - msg += """ + msg += '\n' + self.__mv_warning + if self.install_dir not in map(normalize_path, sys.path): + msg += '\n' + self.__id_warning -Because this distribution was installed --multi-version, before you can -import modules from this package in an application, you will need to -'import pkg_resources' and then use a 'require()' call similar to one of -these examples, in order to select the desired version: - - pkg_resources.require("%(name)s") # latest installed version - pkg_resources.require("%(name)s==%(version)s") # this exact version - pkg_resources.require("%(name)s>=%(version)s") # this version or higher -""" - if self.install_dir not in map(normalize_path,sys.path): - msg += """ - -Note also that the installation directory must be on sys.path at runtime for -this to work. (e.g. by being the application's script directory, by being on -PYTHONPATH, or by being added to sys.path by your code.) -""" eggloc = dist.location name = dist.project_name version = dist.version - extras = '' # TODO: self.report_extras(req, dist) + extras = '' # TODO: self.report_extras(req, dist) return msg % locals() + __editable_msg = textwrap.dedent(""" + Extracted editable version of %(spec)s to %(dirname)s + + If it uses setuptools in its setup script, you can activate it in + "development" mode by going to that directory and running:: + + %(python)s setup.py develop + + See the setuptools documentation for the "develop" command for more info. + """).lstrip() + def report_editable(self, spec, setup_script): dirname = os.path.dirname(setup_script) python = sys.executable - return """\nExtracted editable version of %(spec)s to %(dirname)s - -If it uses setuptools in its setup script, you can activate it in -"development" mode by going to that directory and running:: - - %(python)s setup.py develop - -See the setuptools documentation for the "develop" command for more info. -""" % locals() + return '\n' + self.__editable_msg % locals() def run_setup(self, setup_script, setup_base, args): sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg) sys.modules.setdefault('distutils.command.egg_info', egg_info) args = list(args) - if self.verbose>2: + if self.verbose > 2: v = 'v' * (self.verbose - 1) - args.insert(0,'-'+v) - elif self.verbose<2: - args.insert(0,'-q') + args.insert(0, '-' + v) + elif self.verbose < 2: + args.insert(0, '-q') if self.dry_run: - args.insert(0,'-n') + args.insert(0, '-n') log.info( - "Running %s %s", setup_script[len(setup_base)+1:], ' '.join(args) + "Running %s %s", setup_script[len(setup_base) + 1:], ' '.join(args) ) try: run_setup(setup_script, args) - except SystemExit: - v = sys.exc_info()[1] + except SystemExit as v: raise DistutilsError("Setup script exited with %s" % (v.args[0],)) def build_and_install(self, setup_script, setup_base): @@ -1020,11 +1071,11 @@ See the setuptools documentation for the "develop" command for more info. 
eggs.append(self.install_egg(dist.location, setup_base)) if not eggs and not self.dry_run: log.warn("No eggs found in %s (setup script problem?)", - dist_dir) + dist_dir) return eggs finally: rmtree(dist_dir) - log.set_verbosity(self.verbose) # restore our log verbosity + log.set_verbosity(self.verbose) # restore our log verbosity def _set_fetcher_options(self, base): """ @@ -1034,7 +1085,7 @@ See the setuptools documentation for the "develop" command for more info. are available to that command as well. """ # find the fetch options from easy_install and write them out - # to the setup.cfg file. + # to the setup.cfg file. ei_opts = self.distribution.get_option_dict('easy_install').copy() fetch_directives = ( 'find_links', 'site_dirs', 'index_url', 'optimize', @@ -1042,7 +1093,8 @@ See the setuptools documentation for the "develop" command for more info. ) fetch_options = {} for key, val in ei_opts.items(): - if key not in fetch_directives: continue + if key not in fetch_directives: + continue fetch_options[key.replace('_', '-')] = val[1] # create a settings dictionary suitable for `edit_config` settings = dict(easy_install=fetch_options) @@ -1053,7 +1105,7 @@ See the setuptools documentation for the "develop" command for more info. if self.pth_file is None: return - for d in self.pth_file[dist.key]: # drop old entries + for d in self.pth_file[dist.key]: # drop old entries if self.multi_version or d.location != dist.location: log.info("Removing %s from easy-install.pth file", d) self.pth_file.remove(d) @@ -1068,7 +1120,7 @@ See the setuptools documentation for the "develop" command for more info. ) else: log.info("Adding %s to easy-install.pth file", dist) - self.pth_file.add(dist) # add new entry + self.pth_file.add(dist) # add new entry if dist.location not in self.shadow_path: self.shadow_path.append(dist.location) @@ -1076,19 +1128,20 @@ See the setuptools documentation for the "develop" command for more info. self.pth_file.save() - if dist.key=='setuptools': + if dist.key == 'setuptools': # Ensure that setuptools itself never becomes unavailable! # XXX should this check for latest version? - filename = os.path.join(self.install_dir,'setuptools.pth') - if os.path.islink(filename): os.unlink(filename) + filename = os.path.join(self.install_dir, 'setuptools.pth') + if os.path.islink(filename): + os.unlink(filename) f = open(filename, 'wt') - f.write(self.pth_file.make_relative(dist.location)+'\n') + f.write(self.pth_file.make_relative(dist.location) + '\n') f.close() def unpack_progress(self, src, dst): # Progress filter for unpacking log.debug("Unpacking %s to %s", src, dst) - return dst # only unpack-and-compile skips files for dry run + return dst # only unpack-and-compile skips files for dry run def unpack_and_compile(self, egg_path, destination): to_compile = [] @@ -1099,22 +1152,23 @@ See the setuptools documentation for the "develop" command for more info. 
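`_set_fetcher_options`, reflowed above, persists easy_install's fetch-related options into the child build's `setup.cfg` via `edit_config` so a nested `setup.py` run sees the same package sources. Used directly, that looks roughly like this (the URL and path are placeholders):

```python
from setuptools.command.setopt import edit_config

# Option names use dashes, as _set_fetcher_options rewrites them
settings = dict(easy_install={
    'index-url': 'https://pypi.example.org/simple/',  # placeholder URL
    'find-links': '/srv/local-wheels',                # placeholder path
})
edit_config('setup.cfg', settings)  # writes/updates the [easy_install] section
```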
to_compile.append(dst) elif dst.endswith('.dll') or dst.endswith('.so'): to_chmod.append(dst) - self.unpack_progress(src,dst) + self.unpack_progress(src, dst) return not self.dry_run and dst or None unpack_archive(egg_path, destination, pf) self.byte_compile(to_compile) if not self.dry_run: for f in to_chmod: - mode = ((os.stat(f)[stat.ST_MODE]) | 0x16D) & 0xFED # 0555, 07755 + mode = ((os.stat(f)[stat.ST_MODE]) | 0o555) & 0o7755 chmod(f, mode) def byte_compile(self, to_compile): - if _dont_write_bytecode: + if sys.dont_write_bytecode: self.warn('byte-compiling is disabled, skipping.') return from distutils.util import byte_compile + try: # try to make the byte compile messages quieter log.set_verbosity(self.verbose - 1) @@ -1126,38 +1180,41 @@ See the setuptools documentation for the "develop" command for more info. dry_run=self.dry_run ) finally: - log.set_verbosity(self.verbose) # restore original verbosity + log.set_verbosity(self.verbose) # restore original verbosity + + __no_default_msg = textwrap.dedent(""" + bad install directory or PYTHONPATH + + You are attempting to install a package to a directory that is not + on PYTHONPATH and which Python does not read ".pth" files from. The + installation directory you specified (via --install-dir, --prefix, or + the distutils default setting) was: + + %s + + and your PYTHONPATH environment variable currently contains: + + %r + + Here are some of your options for correcting the problem: + + * You can choose a different installation directory, i.e., one that is + on PYTHONPATH or supports .pth files + + * You can add the installation directory to the PYTHONPATH environment + variable. (It must then also be on PYTHONPATH whenever you run + Python and want to use the package(s) you are installing.) + + * You can set up the installation directory to support ".pth" files by + using one of the approaches described here: + + https://pythonhosted.org/setuptools/easy_install.html#custom-installation-locations + + Please make the appropriate changes for your system and try again.""").lstrip() def no_default_version_msg(self): - template = """bad install directory or PYTHONPATH - -You are attempting to install a package to a directory that is not -on PYTHONPATH and which Python does not read ".pth" files from. The -installation directory you specified (via --install-dir, --prefix, or -the distutils default setting) was: - - %s - -and your PYTHONPATH environment variable currently contains: - - %r - -Here are some of your options for correcting the problem: - -* You can choose a different installation directory, i.e., one that is - on PYTHONPATH or supports .pth files - -* You can add the installation directory to the PYTHONPATH environment - variable. (It must then also be on PYTHONPATH whenever you run - Python and want to use the package(s) you are installing.) 
- -* You can set up the installation directory to support ".pth" files by - using one of the approaches described here: - - https://pythonhosted.org/setuptools/easy_install.html#custom-installation-locations - -Please make the appropriate changes for your system and try again.""" - return template % (self.install_dir, os.environ.get('PYTHONPATH','')) + template = self.__no_default_msg + return template % (self.install_dir, os.environ.get('PYTHONPATH', '')) def install_site_py(self): """Make sure there's a site.py in the target dir, if needed""" @@ -1171,10 +1228,10 @@ Please make the appropriate changes for your system and try again.""" if os.path.exists(sitepy): log.debug("Checking existing site.py in %s", self.install_dir) - f = open(sitepy,'rb') + f = open(sitepy, 'rb') current = f.read() # we want str, not bytes - if sys.version_info >= (3,): + if PY3: current = current.decode() f.close() @@ -1188,7 +1245,7 @@ Please make the appropriate changes for your system and try again.""" log.info("Creating %s", sitepy) if not self.dry_run: ensure_directory(sitepy) - f = open(sitepy,'wb') + f = open(sitepy, 'wb') f.write(source) f.close() self.byte_compile([sitepy]) @@ -1202,19 +1259,19 @@ Please make the appropriate changes for your system and try again.""" home = convert_path(os.path.expanduser("~")) for name, path in iteritems(self.config_vars): if path.startswith(home) and not os.path.isdir(path): - self.debug_print("os.makedirs('%s', 0700)" % path) - os.makedirs(path, 0x1C0) # 0700 + self.debug_print("os.makedirs('%s', 0o700)" % path) + os.makedirs(path, 0o700) INSTALL_SCHEMES = dict( - posix = dict( - install_dir = '$base/lib/python$py_version_short/site-packages', - script_dir = '$base/bin', + posix=dict( + install_dir='$base/lib/python$py_version_short/site-packages', + script_dir='$base/bin', ), ) DEFAULT_SCHEME = dict( - install_dir = '$base/Lib/site-packages', - script_dir = '$base/Scripts', + install_dir='$base/Lib/site-packages', + script_dir='$base/Scripts', ) def _expand(self, *attrs): @@ -1224,12 +1281,13 @@ Please make the appropriate changes for your system and try again.""" # Set default install_dir/scripts from --prefix config_vars = config_vars.copy() config_vars['base'] = self.prefix - scheme = self.INSTALL_SCHEMES.get(os.name,self.DEFAULT_SCHEME) - for attr,val in scheme.items(): - if getattr(self,attr,None) is None: - setattr(self,attr,val) + scheme = self.INSTALL_SCHEMES.get(os.name, self.DEFAULT_SCHEME) + for attr, val in scheme.items(): + if getattr(self, attr, None) is None: + setattr(self, attr, val) from distutils.util import subst_vars + for attr in attrs: val = getattr(self, attr) if val is not None: @@ -1238,6 +1296,7 @@ Please make the appropriate changes for your system and try again.""" val = os.path.expanduser(val) setattr(self, attr, val) + def get_site_dirs(): # return a list of 'site' dirs sitedirs = [_f for _f in os.environ.get('PYTHONPATH', @@ -1251,10 +1310,10 @@ def get_site_dirs(): sitedirs.append(os.path.join(prefix, "Lib", "site-packages")) elif os.sep == '/': sitedirs.extend([os.path.join(prefix, - "lib", - "python" + sys.version[:3], - "site-packages"), - os.path.join(prefix, "lib", "site-python")]) + "lib", + "python" + sys.version[:3], + "site-packages"), + os.path.join(prefix, "lib", "site-python")]) else: sitedirs.extend( [prefix, os.path.join(prefix, "lib", "site-packages")] @@ -1274,7 +1333,8 @@ def get_site_dirs(): 'site-packages')) lib_paths = get_path('purelib'), get_path('platlib') for site_lib in lib_paths: - if site_lib not in 
sitedirs: sitedirs.append(site_lib) + if site_lib not in sitedirs: + sitedirs.append(site_lib) if site.ENABLE_USER_SITE: sitedirs.append(site.USER_SITE) @@ -1305,12 +1365,12 @@ def expand_paths(inputs): if not name.endswith('.pth'): # We only care about the .pth files continue - if name in ('easy-install.pth','setuptools.pth'): + if name in ('easy-install.pth', 'setuptools.pth'): # Ignore .pth files that we control continue # Read the .pth file - f = open(os.path.join(dirname,name)) + f = open(os.path.join(dirname, name)) lines = list(yield_lines(f)) f.close() @@ -1330,7 +1390,7 @@ def extract_wininst_cfg(dist_filename): Returns a ConfigParser.RawConfigParser, or None """ - f = open(dist_filename,'rb') + f = open(dist_filename, 'rb') try: endrec = zipfile._EndRecData(f) if endrec is None: @@ -1339,21 +1399,23 @@ def extract_wininst_cfg(dist_filename): prepended = (endrec[9] - endrec[5]) - endrec[6] if prepended < 12: # no wininst data here return None - f.seek(prepended-12) + f.seek(prepended - 12) from setuptools.compat import StringIO, ConfigParser import struct - tag, cfglen, bmlen = struct.unpack("<iii",f.read(12)) - if tag not in (0x1234567A, 0x1234567B): - return None # not a valid tag - f.seek(prepended-(12+cfglen)) - cfg = ConfigParser.RawConfigParser({'version':'','target_version':''}) + tag, cfglen, bmlen = struct.unpack("<iii", f.read(12)) + if tag not in (0x1234567A, 0x1234567B): + return None # not a valid tag + + f.seek(prepended - (12 + cfglen)) + cfg = ConfigParser.RawConfigParser( + {'version': '', 'target_version': ''}) try: part = f.read(cfglen) # part is in bytes, but we need to read up to the first null - # byte. - if sys.version_info >= (2,6): + # byte. + if sys.version_info >= (2, 6): null_byte = bytes([0]) else: null_byte = chr(0) @@ -1386,25 +1448,25 @@ def get_exe_prefixes(exe_filename): for info in z.infolist(): name = info.filename parts = name.split('/') - if len(parts)==3 and parts[2]=='PKG-INFO': + if len(parts) == 3 and parts[2] == 'PKG-INFO': if parts[1].endswith('.egg-info'): - prefixes.insert(0,('/'.join(parts[:2]), 'EGG-INFO/')) + prefixes.insert(0, ('/'.join(parts[:2]), 'EGG-INFO/')) break if len(parts) != 2 or not name.endswith('.pth'): continue if name.endswith('-nspkg.pth'): continue - if parts[0].upper() in ('PURELIB','PLATLIB'): + if parts[0].upper() in ('PURELIB', 'PLATLIB'): contents = z.read(name) - if sys.version_info >= (3,): + if PY3: contents = contents.decode() for pth in yield_lines(contents): - pth = pth.strip().replace('\\','/') + pth = pth.strip().replace('\\', '/') if not pth.startswith('import'): - prefixes.append((('%s/%s/' % (parts[0],pth)), '')) + prefixes.append((('%s/%s/' % (parts[0], pth)), '')) finally: z.close() - prefixes = [(x.lower(),y) for x, y in prefixes] + prefixes = [(x.lower(), y) for x, y in prefixes] prefixes.sort() prefixes.reverse() return prefixes @@ -1418,6 +1480,7 @@ def parse_requirement_arg(spec): "Not a URL, existing file, or requirement spec: %r" % (spec,) ) + class PthDistributions(Environment): """A .pth file with Distribution paths in it""" @@ -1437,7 +1500,7 @@ class PthDistributions(Environment): saw_import = False seen = dict.fromkeys(self.sitedirs) if os.path.isfile(self.filename): - f = open(self.filename,'rt') + f = open(self.filename, 'rt') for line in f: if line.startswith('import'): saw_import = True @@ -1449,17 +1512,17 @@ class PthDistributions(Environment): # skip non-existent paths, in case somebody deleted a package # manually, and duplicate paths as well path = self.paths[-1] = 
normalize_path( - os.path.join(self.basedir,path) + os.path.join(self.basedir, path) ) if not os.path.exists(path) or path in seen: - self.paths.pop() # skip it - self.dirty = True # we cleaned up, so we're dirty now :) + self.paths.pop() # skip it + self.dirty = True # we cleaned up, so we're dirty now :) continue seen[path] = 1 f.close() if self.paths and not saw_import: - self.dirty = True # ensure anything we touch has import wrappers + self.dirty = True # ensure anything we touch has import wrappers while self.paths and not self.paths[-1].strip(): self.paths.pop() @@ -1468,7 +1531,7 @@ class PthDistributions(Environment): if not self.dirty: return - data = '\n'.join(map(self.make_relative,self.paths)) + data = '\n'.join(map(self.make_relative, self.paths)) if data: log.debug("Saving %s", self.filename) data = ( @@ -1482,7 +1545,7 @@ class PthDistributions(Environment): if os.path.islink(self.filename): os.unlink(self.filename) - f = open(self.filename,'wt') + f = open(self.filename, 'wt') f.write(data) f.close() @@ -1494,10 +1557,14 @@ class PthDistributions(Environment): def add(self, dist): """Add `dist` to the distribution map""" - if (dist.location not in self.paths and ( + new_path = ( + dist.location not in self.paths and ( dist.location not in self.sitedirs or - dist.location == os.getcwd() # account for '.' being in PYTHONPATH - )): + # account for '.' being in PYTHONPATH + dist.location == os.getcwd() + ) + ) + if new_path: self.paths.append(dist.location) self.dirty = True Environment.add(self, dist) @@ -1509,13 +1576,13 @@ class PthDistributions(Environment): self.dirty = True Environment.remove(self, dist) - def make_relative(self,path): + def make_relative(self, path): npath, last = os.path.split(normalize_path(path)) baselen = len(self.basedir) parts = [last] - sep = os.altsep=='/' and '/' or os.sep - while len(npath)>=baselen: - if npath==self.basedir: + sep = os.altsep == '/' and '/' or os.sep + while len(npath) >= baselen: + if npath == self.basedir: parts.append(os.curdir) parts.reverse() return sep.join(parts) @@ -1524,59 +1591,203 @@ class PthDistributions(Environment): else: return path -def get_script_header(script_text, executable=sys_executable, wininst=False): - """Create a #! line, getting options (if any) from script_text""" - from distutils.command.build_scripts import first_line_re + +def _first_line_re(): + """ + Return a regular expression based on first_line_re suitable for matching + strings. + """ + if isinstance(first_line_re.pattern, str): + return first_line_re # first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern. 
-    if not isinstance(first_line_re.pattern, str):
-        first_line_re = re.compile(first_line_re.pattern.decode())
+    return re.compile(first_line_re.pattern.decode())
 
-    first = (script_text+'\n').splitlines()[0]
-    match = first_line_re.match(first)
-    options = ''
-    if match:
-        options = match.group(1) or ''
-        if options: options = ' '+options
-    if wininst:
-        executable = "python.exe"
-    else:
-        executable = nt_quote_arg(executable)
-    hdr = "#!%(executable)s%(options)s\n" % locals()
-    if not isascii(hdr):
-        # Non-ascii path to sys.executable, use -x to prevent warnings
-        if options:
-            if options.strip().startswith('-'):
-                options = ' -x'+options.strip()[1:]
-            # else: punt, we can't do it, let the warning happen anyway
-        else:
-            options = ' -x'
-    executable = fix_jython_executable(executable, options)
-    hdr = "#!%(executable)s%(options)s\n" % locals()
-    return hdr
 
 def auto_chmod(func, arg, exc):
-    if func is os.remove and os.name=='nt':
+    if func is os.remove and os.name == 'nt':
         chmod(arg, stat.S_IWRITE)
         return func(arg)
     et, ev, _ = sys.exc_info()
-    reraise(et, (ev[0], ev[1] + (" %s %s" % (func,arg))))
+    reraise(et, (ev[0], ev[1] + (" %s %s" % (func, arg))))
 
-def uncache_zipdir(path):
-    """Ensure that the importer caches dont have stale info for `path`"""
-    from zipimport import _zip_directory_cache as zdc
-    _uncache(path, zdc)
-    _uncache(path, sys.path_importer_cache)
 
-def _uncache(path, cache):
-    if path in cache:
-        del cache[path]
+
+def update_dist_caches(dist_path, fix_zipimporter_caches):
+    """
+    Fix any globally cached `dist_path` related data
+
+    `dist_path` should be a path of a newly installed egg distribution (zipped
+    or unzipped).
+
+    sys.path_importer_cache contains finder objects that have been cached when
+    importing data from the original distribution. Any such finders need to be
+    cleared since the replacement distribution might be packaged differently,
+    e.g. a zipped egg distribution might get replaced with an unzipped egg
+    folder or vice versa. Having the old finders cached may then cause Python
+    to attempt loading modules from the replacement distribution using an
+    incorrect loader.
+
+    zipimport.zipimporter objects are Python loaders charged with importing
+    data packaged inside zip archives. If stale loaders referencing the
+    original distribution are left behind, they can fail to load modules from
+    the replacement distribution. E.g. if an old zipimport.zipimporter instance
+    is used to load data from a new zipped egg archive, it may cause the
+    operation to attempt to locate the requested data in the wrong location -
+    one indicated by the original distribution's zip archive directory
+    information. Such an operation may then fail outright, e.g. report having
+    read a 'bad local file header', or even worse, it may fail silently and
+    return invalid data.
+
+    zipimport._zip_directory_cache contains cached zip archive directory
+    information for all existing zipimport.zipimporter instances and all such
+    instances connected to the same archive share the same cached directory
+    information.
+
+    If asked, and the underlying Python implementation allows it, we can fix
+    all existing zipimport.zipimporter instances instead of having to track
+    them down and remove them one by one, by updating their shared cached zip
+    archive directory information. This, of course, assumes that the
+    replacement distribution is packaged as a zipped egg.
+
+    If not asked to fix existing zipimport.zipimporter instances, we still do
+    our best to clear any remaining zipimport.zipimporter related cached data
+    that might somehow later get used when attempting to load data from the new
+    distribution and thus cause such load operations to fail. Note that when
+    tracking down such remaining stale data, we cannot catch every conceivable
+    usage from here, and we clear only those that we know of and have found to
+    cause problems if left alive. Any remaining caches should be updated by
+    whoever is in charge of maintaining them, i.e. they should be ready to
+    handle us replacing their zip archives with new distributions at runtime.
+
+    """
+    # There are several other known sources of stale zipimport.zipimporter
+    # instances that we do not clear here, but might if ever given a reason to
+    # do so:
+    # * Global setuptools pkg_resources.working_set (a.k.a. 'master working
+    #   set') may contain distributions which may in turn contain their
+    #   zipimport.zipimporter loaders.
+    # * Several zipimport.zipimporter loaders held by local variables further
+    #   up the function call stack when running the setuptools installation.
+    # * Already loaded modules may have their __loader__ attribute set to the
+    #   exact loader instance used when importing them. Python 3.4 docs state
+    #   that this information is intended mostly for introspection and so is
+    #   not expected to cause us problems.
+    normalized_path = normalize_path(dist_path)
+    _uncache(normalized_path, sys.path_importer_cache)
+    if fix_zipimporter_caches:
+        _replace_zip_directory_cache_data(normalized_path)
     else:
-        path = normalize_path(path)
-        for p in cache:
-            if normalize_path(p)==path:
-                del cache[p]
-                return
+        # Here, even though we do not want to fix existing and now stale
+        # zipimporter cache information, we still want to remove it. Related to
+        # Python's zip archive directory information cache, we clear each of
+        # its stale entries in two phases:
+        #   1. Clear the entry so attempting to access zip archive information
+        #      via any existing stale zipimport.zipimporter instances fails.
+        #   2. Remove the entry from the cache so any newly constructed
+        #      zipimport.zipimporter instances do not end up using old stale
+        #      zip archive directory information.
+        # This whole stale data removal step does not seem strictly necessary,
+        # but has been left in because it was done before we started replacing
+        # the zip archive directory information cache content if possible, and
+        # there are no relevant unit tests that we can depend on to tell us if
+        # this is really needed.
+        _remove_and_clear_zip_directory_cache_data(normalized_path)
+
+
+def _collect_zipimporter_cache_entries(normalized_path, cache):
+    """
+    Return zipimporter cache entry keys related to a given normalized path.
+
+    Alternative path spellings (e.g. those using different character case or
+    those using alternative path separators) related to the same path are
+    included. Any sub-path entries are included as well, i.e. those
+    corresponding to zip archives embedded in other zip archives.
+
+    """
+    result = []
+    prefix_len = len(normalized_path)
+    for p in cache:
+        np = normalize_path(p)
+        if (np.startswith(normalized_path) and
+                np[prefix_len:prefix_len + 1] in (os.sep, '')):
+            result.append(p)
+    return result
+
+
+def _update_zipimporter_cache(normalized_path, cache, updater=None):
+    """
+    Update zipimporter cache data for a given normalized path.
+
+    Any sub-path entries are processed as well, i.e.
those corresponding to zip + archives embedded in other zip archives. + + Given updater is a callable taking a cache entry key and the original entry + (after already removing the entry from the cache), and expected to update + the entry and possibly return a new one to be inserted in its place. + Returning None indicates that the entry should not be replaced with a new + one. If no updater is given, the cache entries are simply removed without + any additional processing, the same as if the updater simply returned None. + + """ + for p in _collect_zipimporter_cache_entries(normalized_path, cache): + # N.B. pypy's custom zipimport._zip_directory_cache implementation does + # not support the complete dict interface: + # * Does not support item assignment, thus not allowing this function + # to be used only for removing existing cache entries. + # * Does not support the dict.pop() method, forcing us to use the + # get/del patterns instead. For more detailed information see the + # following links: + # https://bitbucket.org/pypa/setuptools/issue/202/more-robust-zipimporter-cache-invalidation#comment-10495960 + # https://bitbucket.org/pypy/pypy/src/dd07756a34a41f674c0cacfbc8ae1d4cc9ea2ae4/pypy/module/zipimport/interp_zipimport.py#cl-99 + old_entry = cache[p] + del cache[p] + new_entry = updater and updater(p, old_entry) + if new_entry is not None: + cache[p] = new_entry + + +def _uncache(normalized_path, cache): + _update_zipimporter_cache(normalized_path, cache) + + +def _remove_and_clear_zip_directory_cache_data(normalized_path): + def clear_and_remove_cached_zip_archive_directory_data(path, old_entry): + old_entry.clear() + + _update_zipimporter_cache( + normalized_path, zipimport._zip_directory_cache, + updater=clear_and_remove_cached_zip_archive_directory_data) + +# PyPy Python implementation does not allow directly writing to the +# zipimport._zip_directory_cache and so prevents us from attempting to correct +# its content. The best we can do there is clear the problematic cache content +# and have PyPy repopulate it as needed. The downside is that if there are any +# stale zipimport.zipimporter instances laying around, attempting to use them +# will fail due to not having its zip archive directory information available +# instead of being automatically corrected to use the new correct zip archive +# directory information. +if '__pypy__' in sys.builtin_module_names: + _replace_zip_directory_cache_data = \ + _remove_and_clear_zip_directory_cache_data +else: + def _replace_zip_directory_cache_data(normalized_path): + def replace_cached_zip_archive_directory_data(path, old_entry): + # N.B. In theory, we could load the zip directory information just + # once for all updated path spellings, and then copy it locally and + # update its contained path strings to contain the correct + # spelling, but that seems like a way too invasive move (this cache + # structure is not officially documented anywhere and could in + # theory change with new Python releases) for no significant + # benefit. + old_entry.clear() + zipimport.zipimporter(path) + old_entry.update(zipimport._zip_directory_cache[path]) + return old_entry + + _update_zipimporter_cache( + normalized_path, zipimport._zip_directory_cache, + updater=replace_cached_zip_archive_directory_data) + def is_python(text, filename='<string>'): "Is this string a valid Python script?" 
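A small demonstration of the staleness these `_update_zipimporter_cache` helpers guard against: zipimport caches an archive's directory listing on first read, so a replaced egg stays invisible until the cache entry is cleared (PyPy) or cleared and rebuilt (CPython, as `_replace_zip_directory_cache_data` does above):

```python
import os
import zipfile
import zipimport

def make_egg(path, body):
    with zipfile.ZipFile(path, 'w') as z:
        z.writestr('demo.py', body)

make_egg('demo.egg', 'VERSION = 1\n')
zipimport.zipimporter('demo.egg')      # first read populates the cache
make_egg('demo.egg', 'VERSION = 2\n')  # replace the archive on disk

# The cached directory still describes the old archive. Clearing the
# entry and constructing a fresh zipimporter re-reads it, which is the
# CPython-only repair performed above (PyPy's cache rejects writes, so
# there the entry is only cleared).
zipimport._zip_directory_cache.pop('demo.egg', None)
zipimport.zipimporter('demo.egg')
os.unlink('demo.egg')
```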
@@ -1587,94 +1798,181 @@ def is_python(text, filename='<string>'): else: return True + def is_sh(executable): """Determine if the specified executable is a .sh (contains a #! line)""" try: - fp = open(executable) - magic = fp.read(2) - fp.close() - except (OSError,IOError): return executable + with io.open(executable, encoding='latin-1') as fp: + magic = fp.read(2) + except (OSError, IOError): + return executable return magic == '#!' + def nt_quote_arg(arg): """Quote a command line argument according to Windows parsing rules""" + return subprocess.list2cmdline([arg]) - result = [] - needquote = False - nb = 0 - - needquote = (" " in arg) or ("\t" in arg) - if needquote: - result.append('"') - - for c in arg: - if c == '\\': - nb += 1 - elif c == '"': - # double preceding backslashes, then add a \" - result.append('\\' * (nb*2) + '\\"') - nb = 0 - else: - if nb: - result.append('\\' * nb) - nb = 0 - result.append(c) - - if nb: - result.append('\\' * nb) - - if needquote: - result.append('\\' * nb) # double the trailing backslashes - result.append('"') - - return ''.join(result) def is_python_script(script_text, filename): """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc. """ if filename.endswith('.py') or filename.endswith('.pyw'): - return True # extension says it's Python + return True # extension says it's Python if is_python(script_text, filename): - return True # it's syntactically valid Python + return True # it's syntactically valid Python if script_text.startswith('#!'): # It begins with a '#!' line, so check if 'python' is in it somewhere return 'python' in script_text.splitlines()[0].lower() - return False # Not any Python I can recognize + return False # Not any Python I can recognize + try: from os import chmod as _chmod except ImportError: # Jython compatibility - def _chmod(*args): pass + def _chmod(*args): + pass + def chmod(path, mode): log.debug("changing mode of %s to %o", path, mode) try: _chmod(path, mode) - except os.error: - e = sys.exc_info()[1] + except os.error as e: log.debug("chmod failed: %s", e) -def fix_jython_executable(executable, options): - if sys.platform.startswith('java') and is_sh(executable): - # Workaround for Jython is not needed on Linux systems. - import java - if java.lang.System.getProperty("os.name") == "Linux": - return executable - # Workaround Jython's sys.executable being a .sh (an invalid - # shebang line interpreter) - if options: +def fix_jython_executable(executable, options): + warnings.warn("Use JythonCommandSpec", DeprecationWarning, stacklevel=2) + + if not JythonCommandSpec.relevant(): + return executable + + cmd = CommandSpec.best().from_param(executable) + cmd.install_options(options) + return cmd.as_header().lstrip('#!').rstrip('\n') + + +class CommandSpec(list): + """ + A command spec for a #! header, specified as a list of arguments akin to + those passed to Popen. + """ + + options = [] + split_args = dict() + + @classmethod + def best(cls): + """ + Choose the best CommandSpec class based on environmental conditions. + """ + return cls if not JythonCommandSpec.relevant() else JythonCommandSpec + + @classmethod + def _sys_executable(cls): + _default = os.path.normpath(sys.executable) + return os.environ.get('__PYVENV_LAUNCHER__', _default) + + @classmethod + def from_param(cls, param): + """ + Construct a CommandSpec from a parameter to build_scripts, which may + be None. 
+ """ + if isinstance(param, cls): + return param + if isinstance(param, list): + return cls(param) + if param is None: + return cls.from_environment() + # otherwise, assume it's a string. + return cls.from_string(param) + + @classmethod + def from_environment(cls): + return cls([cls._sys_executable()]) + + @classmethod + def from_string(cls, string): + """ + Construct a command spec from a simple string representing a command + line parseable by shlex.split. + """ + items = shlex.split(string, **cls.split_args) + return cls(items) + + def install_options(self, script_text): + self.options = shlex.split(self._extract_options(script_text)) + cmdline = subprocess.list2cmdline(self) + if not isascii(cmdline): + self.options[:0] = ['-x'] + + @staticmethod + def _extract_options(orig_script): + """ + Extract any options from the first line of the script. + """ + first = (orig_script + '\n').splitlines()[0] + match = _first_line_re().match(first) + options = match.group(1) or '' if match else '' + return options.strip() + + def as_header(self): + return self._render(self + list(self.options)) + + @staticmethod + def _render(items): + cmdline = subprocess.list2cmdline(items) + return '#!' + cmdline + '\n' + +# For pbr compat; will be removed in a future version. +sys_executable = CommandSpec._sys_executable() + + +class WindowsCommandSpec(CommandSpec): + split_args = dict(posix=False) + + +class JythonCommandSpec(CommandSpec): + @classmethod + def relevant(cls): + return ( + sys.platform.startswith('java') + and + __import__('java').lang.System.getProperty('os.name') != 'Linux' + ) + + @classmethod + def from_environment(cls): + string = '"' + cls._sys_executable() + '"' + return cls.from_string(string) + + @classmethod + def from_string(cls, string): + return cls([string]) + + def as_header(self): + """ + Workaround Jython's sys.executable being a .sh (an invalid + shebang line interpreter) + """ + if not is_sh(self[0]): + return super(JythonCommandSpec, self).as_header() + + if self.options: # Can't apply the workaround, leave it broken log.warn( "WARNING: Unable to adapt shebang line for Jython," " the following script is NOT executable\n" " see http://bugs.jython.org/issue1112 for" " more information.") - else: - return '/usr/bin/env %s' % executable - return executable + return super(JythonCommandSpec, self).as_header() + + items = ['/usr/bin/env'] + self + list(self.options) + return self._render(items) class ScriptWriter(object): @@ -1695,39 +1993,81 @@ class ScriptWriter(object): ) """).lstrip() + command_spec_class = CommandSpec + @classmethod - def get_script_args(cls, dist, executable=sys_executable, wininst=False): + def get_script_args(cls, dist, executable=None, wininst=False): + # for backward compatibility + warnings.warn("Use get_args", DeprecationWarning) + writer = (WindowsScriptWriter if wininst else ScriptWriter).best() + header = cls.get_script_header("", executable, wininst) + return writer.get_args(dist, header) + + @classmethod + def get_script_header(cls, script_text, executable=None, wininst=False): + # for backward compatibility + warnings.warn("Use get_header", DeprecationWarning) + if wininst: + executable = "python.exe" + cmd = cls.command_spec_class.best().from_param(executable) + cmd.install_options(script_text) + return cmd.as_header() + + @classmethod + def get_args(cls, dist, header=None): """ Yield write_script() argument tuples for a distribution's entrypoints """ - gen_class = cls.get_writer(wininst) + if header is None: + header = cls.get_header() spec = 
str(dist.as_requirement()) - header = get_script_header("", executable, wininst) for type_ in 'console', 'gui': group = type_ + '_scripts' for name, ep in dist.get_entry_map(group).items(): - script_text = gen_class.template % locals() - for res in gen_class._get_script_args(type_, name, header, + script_text = cls.template % locals() + for res in cls._get_script_args(type_, name, header, script_text): yield res @classmethod def get_writer(cls, force_windows): - if force_windows or sys.platform=='win32': - return WindowsScriptWriter.get_writer() - return cls + # for backward compatibility + warnings.warn("Use best", DeprecationWarning) + return WindowsScriptWriter.best() if force_windows else cls.best() + + @classmethod + def best(cls): + """ + Select the best ScriptWriter for this environment. + """ + return WindowsScriptWriter.best() if sys.platform == 'win32' else cls @classmethod def _get_script_args(cls, type_, name, header, script_text): # Simply write the stub with no extension. - yield (name, header+script_text) + yield (name, header + script_text) + + @classmethod + def get_header(cls, script_text="", executable=None): + """Create a #! line, getting options (if any) from script_text""" + cmd = cls.command_spec_class.best().from_param(executable) + cmd.install_options(script_text) + return cmd.as_header() class WindowsScriptWriter(ScriptWriter): + command_spec_class = WindowsCommandSpec + @classmethod def get_writer(cls): + # for backward compatibility + warnings.warn("Use best", DeprecationWarning) + return cls.best() + + @classmethod + def best(cls): """ - Get a script writer suitable for Windows + Select the best ScriptWriter suitable for Windows """ writer_lookup = dict( executable=WindowsExecutableLauncherWriter, @@ -1743,12 +2083,12 @@ class WindowsScriptWriter(ScriptWriter): ext = dict(console='.pya', gui='.pyw')[type_] if ext not in os.environ['PATHEXT'].lower().split(';'): warnings.warn("%s not listed in PATHEXT; scripts will not be " - "recognized as executables." % ext, UserWarning) + "recognized as executables." % ext, UserWarning) old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe'] old.remove(ext) header = cls._adjust_header(type_, header) - blockers = [name+x for x in old] - yield name+ext, header+script_text, 't', blockers + blockers = [name + x for x in old] + yield name + ext, header + script_text, 't', blockers @staticmethod def _adjust_header(type_, orig_header): @@ -1775,32 +2115,35 @@ class WindowsExecutableLauncherWriter(WindowsScriptWriter): """ For Windows, add a .py extension and an .exe launcher """ - if type_=='gui': + if type_ == 'gui': launcher_type = 'gui' ext = '-script.pyw' old = ['.pyw'] else: launcher_type = 'cli' ext = '-script.py' - old = ['.py','.pyc','.pyo'] + old = ['.py', '.pyc', '.pyo'] hdr = cls._adjust_header(type_, header) - blockers = [name+x for x in old] - yield (name+ext, hdr+script_text, 't', blockers) + blockers = [name + x for x in old] + yield (name + ext, hdr + script_text, 't', blockers) yield ( - name+'.exe', get_win_launcher(launcher_type), - 'b' # write in binary mode + name + '.exe', get_win_launcher(launcher_type), + 'b' # write in binary mode ) if not is_64bit(): # install a manifest for the launcher to prevent Windows - # from detecting it as an installer (which it will for + # from detecting it as an installer (which it will for # launchers like easy_install.exe). Consider only # adding a manifest for launchers detected as installers. # See Distribute #143 for details. 
m_name = name + '.exe.manifest' yield (m_name, load_launcher_manifest(name), 't') + # for backward-compatibility get_script_args = ScriptWriter.get_script_args +get_script_header = ScriptWriter.get_script_header + def get_win_launcher(type): """ @@ -1811,7 +2154,7 @@ def get_win_launcher(type): Returns the executable as a byte string. """ launcher_fn = '%s.exe' % type - if platform.machine().lower()=='arm': + if platform.machine().lower() == 'arm': launcher_fn = launcher_fn.replace(".", "-arm.") if is_64bit(): launcher_fn = launcher_fn.replace(".", "-64.") @@ -1819,13 +2162,15 @@ def get_win_launcher(type): launcher_fn = launcher_fn.replace(".", "-32.") return resource_string('setuptools', launcher_fn) + def load_launcher_manifest(name): manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml') - if sys.version_info[0] < 3: + if PY2: return manifest % vars() else: return manifest.decode('utf-8') % vars() + def rmtree(path, ignore_errors=False, onerror=auto_chmod): """Recursively delete a directory tree. @@ -1861,55 +2206,61 @@ def rmtree(path, ignore_errors=False, onerror=auto_chmod): except os.error: onerror(os.rmdir, path, sys.exc_info()) + def current_umask(): - tmp = os.umask(0x12) # 022 + tmp = os.umask(0o022) os.umask(tmp) return tmp + def bootstrap(): # This function is called when setuptools*.egg is run using /bin/sh import setuptools + argv0 = os.path.dirname(setuptools.__path__[0]) sys.argv[0] = argv0 sys.argv.append(argv0) main() + def main(argv=None, **kw): from setuptools import setup from setuptools.dist import Distribution - import distutils.core - USAGE = """\ -usage: %(script)s [options] requirement_or_url ... - or: %(script)s --help -""" + class DistributionWithoutHelpCommands(Distribution): + common_usage = "" + + def _show_help(self, *args, **kw): + with _patch_usage(): + Distribution._show_help(self, *args, **kw) + + if argv is None: + argv = sys.argv[1:] + + with _patch_usage(): + setup( + script_args=['-q', 'easy_install', '-v'] + argv, + script_name=sys.argv[0] or 'easy_install', + distclass=DistributionWithoutHelpCommands, **kw + ) + + +@contextlib.contextmanager +def _patch_usage(): + import distutils.core + USAGE = textwrap.dedent(""" + usage: %(script)s [options] requirement_or_url ... 
+ or: %(script)s --help + """).lstrip() def gen_usage(script_name): return USAGE % dict( script=os.path.basename(script_name), ) - def with_ei_usage(f): - old_gen_usage = distutils.core.gen_usage - try: - distutils.core.gen_usage = gen_usage - return f() - finally: - distutils.core.gen_usage = old_gen_usage - - class DistributionWithoutHelpCommands(Distribution): - common_usage = "" - - def _show_help(self,*args,**kw): - with_ei_usage(lambda: Distribution._show_help(self,*args,**kw)) - - if argv is None: - argv = sys.argv[1:] - - with_ei_usage(lambda: - setup( - script_args = ['-q','easy_install', '-v']+argv, - script_name = sys.argv[0] or 'easy_install', - distclass=DistributionWithoutHelpCommands, **kw - ) - ) + saved = distutils.core.gen_usage + distutils.core.gen_usage = gen_usage + try: + yield + finally: + distutils.core.gen_usage = saved diff --git a/awx/lib/site-packages/setuptools/command/egg_info.py b/awx/lib/site-packages/setuptools/command/egg_info.py index 5953aad4f6..a9940677ea 100644 --- a/awx/lib/site-packages/setuptools/command/egg_info.py +++ b/awx/lib/site-packages/setuptools/command/egg_info.py @@ -2,22 +2,30 @@ Create a distribution's .egg-info directory and contents""" +from distutils.filelist import FileList as _FileList +from distutils.util import convert_path +from distutils import log +import distutils.errors +import distutils.filelist import os import re import sys -from setuptools import Command -import distutils.errors -from distutils import log -from setuptools.command.sdist import sdist -from setuptools.compat import basestring -from setuptools import svn_utils -from distutils.util import convert_path -from distutils.filelist import FileList as _FileList -from pkg_resources import (parse_requirements, safe_name, parse_version, - safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename) -from setuptools.command.sdist import walk_revctrl +try: + from setuptools_svn import svn_utils +except ImportError: + pass +from setuptools import Command +from setuptools.command.sdist import sdist +from setuptools.compat import basestring, PY3, StringIO +from setuptools.command.sdist import walk_revctrl +from pkg_resources import ( + parse_requirements, safe_name, parse_version, + safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename) +import setuptools.unicode_utils as unicode_utils + +from pkg_resources import packaging class egg_info(Command): description = "create a distribution's .egg-info directory" @@ -26,11 +34,11 @@ class egg_info(Command): ('egg-base=', 'e', "directory containing .egg-info directories" " (default: top of the source tree)"), ('tag-svn-revision', 'r', - "Add subversion revision ID to version number"), + "Add subversion revision ID to version number"), ('tag-date', 'd', "Add date stamp (e.g. 
20050528) to version number"), ('tag-build=', 'b', "Specify explicit tag to add to version number"), ('no-svn-revision', 'R', - "Don't add subversion revision ID [default]"), + "Don't add subversion revision ID [default]"), ('no-date', 'D', "Don't include date stamp [default]"), ] @@ -51,6 +59,7 @@ class egg_info(Command): def save_version_info(self, filename): from setuptools.command.setopt import edit_config + values = dict( egg_info=dict( tag_svn_revision=0, @@ -65,25 +74,32 @@ class egg_info(Command): self.vtags = self.tags() self.egg_version = self.tagged_version() + parsed_version = parse_version(self.egg_version) + try: + is_version = isinstance(parsed_version, packaging.version.Version) + spec = ( + "%s==%s" if is_version else "%s===%s" + ) list( - parse_requirements('%s==%s' % (self.egg_name,self.egg_version)) + parse_requirements(spec % (self.egg_name, self.egg_version)) ) except ValueError: raise distutils.errors.DistutilsOptionError( "Invalid distribution name or version syntax: %s-%s" % - (self.egg_name,self.egg_version) + (self.egg_name, self.egg_version) ) if self.egg_base is None: dirs = self.distribution.package_dir - self.egg_base = (dirs or {}).get('',os.curdir) + self.egg_base = (dirs or {}).get('', os.curdir) self.ensure_dirname('egg_base') - self.egg_info = to_filename(self.egg_name)+'.egg-info' + self.egg_info = to_filename(self.egg_name) + '.egg-info' if self.egg_base != os.curdir: self.egg_info = os.path.join(self.egg_base, self.egg_info) - if '-' in self.egg_name: self.check_broken_egg_info() + if '-' in self.egg_name: + self.check_broken_egg_info() # Set package version for the benefit of dumber commands # (e.g. sdist, bdist_wininst, etc.) @@ -95,7 +111,7 @@ class egg_info(Command): # to the version info # pd = self.distribution._patched_dist - if pd is not None and pd.key==self.egg_name.lower(): + if pd is not None and pd.key == self.egg_name.lower(): pd._version = self.egg_version pd._parsed_version = parse_version(self.egg_version) self.distribution._patched_dist = None @@ -127,7 +143,7 @@ class egg_info(Command): to the file. 
""" log.info("writing %s to %s", what, filename) - if sys.version_info >= (3,): + if PY3: data = data.encode("utf-8") if not self.dry_run: f = open(filename, 'wb') @@ -153,7 +169,7 @@ class egg_info(Command): installer = self.distribution.fetch_build_egg for ep in iter_entry_points('egg_info.writers'): writer = ep.load(installer=installer) - writer(self, ep.name, os.path.join(self.egg_info,ep.name)) + writer(self, ep.name, os.path.join(self.egg_info, ep.name)) # Get rid of native_libs.txt if it was put there by older bdist_egg nl = os.path.join(self.egg_info, "native_libs.txt") @@ -165,68 +181,96 @@ class egg_info(Command): def tags(self): version = '' if self.tag_build: - version+=self.tag_build - if self.tag_svn_revision and ( - os.path.exists('.svn') or os.path.exists('PKG-INFO') - ): version += '-r%s' % self.get_svn_revision() + version += self.tag_build + if self.tag_svn_revision: + rev = self.get_svn_revision() + if rev: # is 0 if it's not an svn working copy + version += '-r%s' % rev if self.tag_date: import time + version += time.strftime("-%Y%m%d") return version @staticmethod def get_svn_revision(): + if 'svn_utils' not in globals(): + return "0" return str(svn_utils.SvnInfo.load(os.curdir).get_revision()) def find_sources(self): """Generate SOURCES.txt manifest file""" - manifest_filename = os.path.join(self.egg_info,"SOURCES.txt") + manifest_filename = os.path.join(self.egg_info, "SOURCES.txt") mm = manifest_maker(self.distribution) mm.manifest = manifest_filename mm.run() self.filelist = mm.filelist def check_broken_egg_info(self): - bei = self.egg_name+'.egg-info' + bei = self.egg_name + '.egg-info' if self.egg_base != os.curdir: bei = os.path.join(self.egg_base, bei) if os.path.exists(bei): log.warn( - "-"*78+'\n' + "-" * 78 + '\n' "Note: Your current .egg-info directory has a '-' in its name;" '\nthis will not work correctly with "setup.py develop".\n\n' - 'Please rename %s to %s to correct this problem.\n'+'-'*78, + 'Please rename %s to %s to correct this problem.\n' + '-' * 78, bei, self.egg_info ) self.broken_egg_info = self.egg_info - self.egg_info = bei # make it work for now + self.egg_info = bei # make it work for now + class FileList(_FileList): """File list that accepts only existing, platform-independent paths""" def append(self, item): - if item.endswith('\r'): # Fix older sdists built on Windows + if item.endswith('\r'): # Fix older sdists built on Windows item = item[:-1] path = convert_path(item) - if sys.version_info >= (3,): - try: - if os.path.exists(path) or os.path.exists(path.encode('utf-8')): - self.files.append(path) - except UnicodeEncodeError: - # Accept UTF-8 filenames even if LANG=C - if os.path.exists(path.encode('utf-8')): - self.files.append(path) - else: - log.warn("'%s' not %s encodable -- skipping", path, - sys.getfilesystemencoding()) - else: - if os.path.exists(path): - self.files.append(path) + if self._safe_path(path): + self.files.append(path) + + def extend(self, paths): + self.files.extend(filter(self._safe_path, paths)) + + def _repair(self): + """ + Replace self.files with only safe paths + + Because some owners of FileList manipulate the underlying + ``files`` attribute directly, this method must be called to + repair those paths. 
+ """ + self.files = list(filter(self._safe_path, self.files)) + + def _safe_path(self, path): + enc_warn = "'%s' not %s encodable -- skipping" + + # To avoid accidental trans-codings errors, first to unicode + u_path = unicode_utils.filesys_decode(path) + if u_path is None: + log.warn("'%s' in unexpected encoding -- skipping" % path) + return False + + # Must ensure utf-8 encodability + utf8_path = unicode_utils.try_encode(u_path, "utf-8") + if utf8_path is None: + log.warn(enc_warn, path, 'utf-8') + return False + + try: + # accept is either way checks out + if os.path.exists(u_path) or os.path.exists(utf8_path): + return True + # this will catch any encode errors decoding u_path + except UnicodeEncodeError: + log.warn(enc_warn, path, sys.getfilesystemencoding()) class manifest_maker(sdist): - template = "MANIFEST.in" def initialize_options(self): @@ -241,7 +285,7 @@ class manifest_maker(sdist): def run(self): self.filelist = FileList() if not os.path.exists(self.manifest): - self.write_manifest() # it must exist so it'll get in the list + self.write_manifest() # it must exist so it'll get in the list self.filelist.findall() self.add_defaults() if os.path.exists(self.template): @@ -251,30 +295,23 @@ class manifest_maker(sdist): self.filelist.remove_duplicates() self.write_manifest() + def _manifest_normalize(self, path): + path = unicode_utils.filesys_decode(path) + return path.replace(os.sep, '/') + def write_manifest(self): - """Write the file list in 'self.filelist' (presumably as filled in - by 'add_defaults()' and 'read_template()') to the manifest file + """ + Write the file list in 'self.filelist' to the manifest file named by 'self.manifest'. """ - # The manifest must be UTF-8 encodable. See #303. - if sys.version_info >= (3,): - files = [] - for file in self.filelist.files: - try: - file.encode("utf-8") - except UnicodeEncodeError: - log.warn("'%s' not UTF-8 encodable -- skipping" % file) - else: - files.append(file) - self.filelist.files = files + self.filelist._repair() - files = self.filelist.files - if os.sep!='/': - files = [f.replace(os.sep,'/') for f in files] - self.execute(write_file, (self.manifest, files), - "writing manifest file '%s'" % self.manifest) + # Now _repairs should encodability, but not unicode + files = [self._manifest_normalize(f) for f in self.filelist.files] + msg = "writing manifest file '%s'" % self.manifest + self.execute(write_file, (self.manifest, files), msg) - def warn(self, msg): # suppress missing-file warnings from sdist + def warn(self, msg): # suppress missing-file warnings from sdist if not msg.startswith("standard file not found:"): sdist.warn(self, msg) @@ -288,15 +325,41 @@ class manifest_maker(sdist): elif os.path.exists(self.manifest): self.read_manifest() ei_cmd = self.get_finalized_command('egg_info') + self._add_egg_info(cmd=ei_cmd) self.filelist.include_pattern("*", prefix=ei_cmd.egg_info) + def _add_egg_info(self, cmd): + """ + Add paths for egg-info files for an external egg-base. + + The egg-info files are written to egg-base. If egg-base is + outside the current working directory, this method + searchs the egg-base directory for files to include + in the manifest. Uses distutils.filelist.findall (which is + really the version monkeypatched in by setuptools/__init__.py) + to perform the search. + + Since findall records relative paths, prefix the returned + paths with cmd.egg_base, so add_default's include_pattern call + (which is looking for the absolute cmd.egg_info) will match + them. 
+ """ + if cmd.egg_base == os.curdir: + # egg-info files were already added by something else + return + + discovered = distutils.filelist.findall(cmd.egg_base) + resolved = (os.path.join(cmd.egg_base, path) for path in discovered) + self.filelist.allfiles.extend(resolved) + def prune_file_list(self): build = self.get_finalized_command('build') base_dir = self.distribution.get_fullname() self.filelist.exclude_pattern(None, prefix=build.build_base) self.filelist.exclude_pattern(None, prefix=base_dir) sep = re.escape(os.sep) - self.filelist.exclude_pattern(sep+r'(RCS|CVS|\.svn)'+sep, is_regex=1) + self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep, + is_regex=1) def write_file(filename, contents): @@ -304,11 +367,13 @@ def write_file(filename, contents): sequence of strings without line terminators) to it. """ contents = "\n".join(contents) - if sys.version_info >= (3,): - contents = contents.encode("utf-8") - f = open(filename, "wb") # always write POSIX-style manifest - f.write(contents) - f.close() + + # assuming the contents has been vetted for utf-8 encoding + contents = contents.encode("utf-8") + + with open(filename, "wb") as f: # always write POSIX-style manifest + f.write(contents) + def write_pkg_info(cmd, basename, filename): log.info("writing %s", filename) @@ -323,10 +388,12 @@ def write_pkg_info(cmd, basename, filename): finally: metadata.name, metadata.version = oldname, oldver - safe = getattr(cmd.distribution,'zip_safe',None) + safe = getattr(cmd.distribution, 'zip_safe', None) from setuptools.command import bdist_egg + bdist_egg.write_safety_flag(cmd.egg_info, safe) + def warn_depends_obsolete(cmd, basename, filename): if os.path.exists(filename): log.warn( @@ -335,55 +402,75 @@ def warn_depends_obsolete(cmd, basename, filename): ) +def _write_requirements(stream, reqs): + lines = yield_lines(reqs or ()) + append_cr = lambda line: line + '\n' + lines = map(append_cr, lines) + stream.writelines(lines) + + def write_requirements(cmd, basename, filename): dist = cmd.distribution - data = ['\n'.join(yield_lines(dist.install_requires or ()))] - for extra,reqs in (dist.extras_require or {}).items(): - data.append('\n\n[%s]\n%s' % (extra, '\n'.join(yield_lines(reqs)))) - cmd.write_or_delete_file("requirements", filename, ''.join(data)) + data = StringIO() + _write_requirements(data, dist.install_requires) + extras_require = dist.extras_require or {} + for extra in sorted(extras_require): + data.write('\n[{extra}]\n'.format(**vars())) + _write_requirements(data, extras_require[extra]) + cmd.write_or_delete_file("requirements", filename, data.getvalue()) + + +def write_setup_requirements(cmd, basename, filename): + data = StringIO() + _write_requirements(data, cmd.distribution.setup_requires) + cmd.write_or_delete_file("setup-requirements", filename, data.getvalue()) + def write_toplevel_names(cmd, basename, filename): pkgs = dict.fromkeys( [ - k.split('.',1)[0] + k.split('.', 1)[0] for k in cmd.distribution.iter_distribution_names() ] ) - cmd.write_file("top-level names", filename, '\n'.join(pkgs)+'\n') + cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n') def overwrite_arg(cmd, basename, filename): write_arg(cmd, basename, filename, True) + def write_arg(cmd, basename, filename, force=False): argname = os.path.splitext(basename)[0] value = getattr(cmd.distribution, argname, None) if value is not None: - value = '\n'.join(value)+'\n' + value = '\n'.join(value) + '\n' cmd.write_or_delete_file(argname, filename, value, force) + def 
write_entries(cmd, basename, filename): ep = cmd.distribution.entry_points - if isinstance(ep,basestring) or ep is None: + if isinstance(ep, basestring) or ep is None: data = ep elif ep is not None: data = [] - for section, contents in ep.items(): - if not isinstance(contents,basestring): + for section, contents in sorted(ep.items()): + if not isinstance(contents, basestring): contents = EntryPoint.parse_group(section, contents) - contents = '\n'.join(map(str,contents.values())) - data.append('[%s]\n%s\n\n' % (section,contents)) + contents = '\n'.join(sorted(map(str, contents.values()))) + data.append('[%s]\n%s\n\n' % (section, contents)) data = ''.join(data) cmd.write_or_delete_file('entry points', filename, data, True) + def get_pkg_info_revision(): # See if we can get a -r### off of PKG-INFO, in case this is an sdist of # a subversion revision # if os.path.exists('PKG-INFO'): - f = open('PKG-INFO','rU') + f = open('PKG-INFO', 'rU') for line in f: match = re.match(r"Version:.*-r(\d+)\s*$", line) if match: diff --git a/awx/lib/site-packages/setuptools/command/install.py b/awx/lib/site-packages/setuptools/command/install.py index 459cd3cd59..d2bca2ec59 100644 --- a/awx/lib/site-packages/setuptools/command/install.py +++ b/awx/lib/site-packages/setuptools/command/install.py @@ -1,18 +1,26 @@ -import setuptools -import sys -import glob -from distutils.command.install import install as _install from distutils.errors import DistutilsArgError +import inspect +import glob +import warnings +import platform +import distutils.command.install as orig -class install(_install): +import setuptools + +# Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for +# now. See https://bitbucket.org/pypa/setuptools/issue/199/ +_install = orig.install + + +class install(orig.install): """Use easy_install to install the package, w/dependencies""" - user_options = _install.user_options + [ + user_options = orig.install.user_options + [ ('old-and-unmanageable', None, "Try not to use this!"), ('single-version-externally-managed', None, - "used by system package builders to create 'flat' eggs"), + "used by system package builders to create 'flat' eggs"), ] - boolean_options = _install.boolean_options + [ + boolean_options = orig.install.boolean_options + [ 'old-and-unmanageable', 'single-version-externally-managed', ] new_commands = [ @@ -22,13 +30,12 @@ class install(_install): _nc = dict(new_commands) def initialize_options(self): - _install.initialize_options(self) + orig.install.initialize_options(self) self.old_and_unmanageable = None self.single_version_externally_managed = None - self.no_compile = None # make DISTUTILS_DEBUG work right! def finalize_options(self): - _install.finalize_options(self) + orig.install.finalize_options(self) if self.root: self.single_version_externally_managed = True elif self.single_version_externally_managed: @@ -41,7 +48,7 @@ class install(_install): def handle_extra_path(self): if self.root or self.single_version_externally_managed: # explicit backward-compatibility mode, allow extra_path to work - return _install.handle_extra_path(self) + return orig.install.handle_extra_path(self) # Ignore extra_path when installing an egg (or being run by another # command without --root or --single-version-externally-managed @@ -51,28 +58,41 @@ class install(_install): def run(self): # Explicit request for old-style install? 
Just do it if self.old_and_unmanageable or self.single_version_externally_managed: - return _install.run(self) + return orig.install.run(self) - # Attempt to detect whether we were called from setup() or by another - # command. If we were called by setup(), our caller will be the - # 'run_command' method in 'distutils.dist', and *its* caller will be - # the 'run_commands' method. If we were called any other way, our - # immediate caller *might* be 'run_command', but it won't have been - # called by 'run_commands'. This is slightly kludgy, but seems to - # work. - # - caller = sys._getframe(2) - caller_module = caller.f_globals.get('__name__','') - caller_name = caller.f_code.co_name - - if caller_module != 'distutils.dist' or caller_name!='run_commands': - # We weren't called from the command line or setup(), so we - # should run in backward-compatibility mode to support bdist_* - # commands. - _install.run(self) + if not self._called_from_setup(inspect.currentframe()): + # Run in backward-compatibility mode to support bdist_* commands. + orig.install.run(self) else: self.do_egg_install() + @staticmethod + def _called_from_setup(run_frame): + """ + Attempt to detect whether run() was called from setup() or by another + command. If called by setup(), the parent caller will be the + 'run_command' method in 'distutils.dist', and *its* caller will be + the 'run_commands' method. If called any other way, the + immediate caller *might* be 'run_command', but it won't have been + called by 'run_commands'. Return True in that case or if a call stack + is unavailable. Return False otherwise. + """ + if run_frame is None: + msg = "Call stack not available. bdist_* commands may fail." + warnings.warn(msg) + if platform.python_implementation() == 'IronPython': + msg = "For best results, pass -X:Frames to enable call stack." 
+ warnings.warn(msg) + return True + res = inspect.getouterframes(run_frame)[2] + caller, = res[:1] + info = inspect.getframeinfo(caller) + caller_module = caller.f_globals.get('__name__', '') + return ( + caller_module == 'distutils.dist' + and info.function == 'run_commands' + ) + def do_egg_install(self): easy_install = self.distribution.get_command_class('easy_install') @@ -97,7 +117,9 @@ class install(_install): cmd.run() setuptools.bootstrap_install_from = None + # XXX Python 3.1 doesn't see _nc if this is inside the class -install.sub_commands = [ - cmd for cmd in _install.sub_commands if cmd[0] not in install._nc - ] + install.new_commands +install.sub_commands = ( + [cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc] + + install.new_commands +) diff --git a/awx/lib/site-packages/setuptools/command/install_egg_info.py b/awx/lib/site-packages/setuptools/command/install_egg_info.py index f44b34b555..fd0f118b33 100644 --- a/awx/lib/site-packages/setuptools/command/install_egg_info.py +++ b/awx/lib/site-packages/setuptools/command/install_egg_info.py @@ -1,7 +1,10 @@ +from distutils import log, dir_util +import os + from setuptools import Command from setuptools.archive_util import unpack_archive -from distutils import log, dir_util -import os, shutil, pkg_resources +import pkg_resources + class install_egg_info(Command): """Install an .egg-info directory for the package""" @@ -16,26 +19,26 @@ class install_egg_info(Command): self.install_dir = None def finalize_options(self): - self.set_undefined_options('install_lib',('install_dir','install_dir')) + self.set_undefined_options('install_lib', + ('install_dir', 'install_dir')) ei_cmd = self.get_finalized_command("egg_info") basename = pkg_resources.Distribution( None, None, ei_cmd.egg_name, ei_cmd.egg_version - ).egg_name()+'.egg-info' + ).egg_name() + '.egg-info' self.source = ei_cmd.egg_info self.target = os.path.join(self.install_dir, basename) self.outputs = [self.target] def run(self): self.run_command('egg_info') - target = self.target if os.path.isdir(self.target) and not os.path.islink(self.target): dir_util.remove_tree(self.target, dry_run=self.dry_run) elif os.path.exists(self.target): - self.execute(os.unlink,(self.target,),"Removing "+self.target) + self.execute(os.unlink, (self.target,), "Removing " + self.target) if not self.dry_run: pkg_resources.ensure_directory(self.target) - self.execute(self.copytree, (), - "Copying %s to %s" % (self.source, self.target) + self.execute( + self.copytree, (), "Copying %s to %s" % (self.source, self.target) ) self.install_namespaces() @@ -44,82 +47,70 @@ class install_egg_info(Command): def copytree(self): # Copy the .egg-info tree to site-packages - def skimmer(src,dst): + def skimmer(src, dst): # filter out source-control directories; note that 'src' is always # a '/'-separated path, regardless of platform. 'dst' is a # platform-specific path. 
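The stack walk in `_called_from_setup` above can be tried in isolation; a small sketch of the same frame inspection (the helper names are mine):

```python
import inspect

def caller_two_frames_out():
    """Report the module and function two frames up, the way
    _called_from_setup decides whether setup() invoked install.run()."""
    frame = inspect.currentframe()
    if frame is None:  # e.g. IronPython without -X:Frames
        return None
    caller = inspect.getouterframes(frame)[2][0]
    info = inspect.getframeinfo(caller)
    return caller.f_globals.get('__name__', ''), info.function

def middle():
    return caller_two_frames_out()

def outer():
    return middle()

print(outer())  # ('__main__', 'outer') when run as a script
```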
- for skip in '.svn/','CVS/': - if src.startswith(skip) or '/'+skip in src: + for skip in '.svn/', 'CVS/': + if src.startswith(skip) or '/' + skip in src: return None self.outputs.append(dst) log.debug("Copying %s to %s", src, dst) return dst + unpack_archive(self.source, self.target, skimmer) - - - - - - - - - - - - - - - - - - - - - - - - def install_namespaces(self): nsp = self._get_all_ns_packages() - if not nsp: return - filename,ext = os.path.splitext(self.target) - filename += '-nspkg.pth'; self.outputs.append(filename) - log.info("Installing %s",filename) - if not self.dry_run: - f = open(filename,'wt') - for pkg in nsp: - # ensure pkg is not a unicode string under Python 2.7 - pkg = str(pkg) - pth = tuple(pkg.split('.')) - trailer = '\n' - if '.' in pkg: - trailer = ( - "; m and setattr(sys.modules[%r], %r, m)\n" - % ('.'.join(pth[:-1]), pth[-1]) - ) - f.write( - "import sys,types,os; " - "p = os.path.join(sys._getframe(1).f_locals['sitedir'], " - "*%(pth)r); " - "ie = os.path.exists(os.path.join(p,'__init__.py')); " - "m = not ie and " - "sys.modules.setdefault(%(pkg)r,types.ModuleType(%(pkg)r)); " - "mp = (m or []) and m.__dict__.setdefault('__path__',[]); " - "(p not in mp) and mp.append(p)%(trailer)s" - % locals() - ) - f.close() + if not nsp: + return + filename, ext = os.path.splitext(self.target) + filename += '-nspkg.pth' + self.outputs.append(filename) + log.info("Installing %s", filename) + lines = map(self._gen_nspkg_line, nsp) + + if self.dry_run: + # always generate the lines, even in dry run + list(lines) + return + + with open(filename, 'wt') as f: + f.writelines(lines) + + _nspkg_tmpl = ( + "import sys, types, os", + "p = os.path.join(sys._getframe(1).f_locals['sitedir'], *%(pth)r)", + "ie = os.path.exists(os.path.join(p,'__init__.py'))", + "m = not ie and " + "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))", + "mp = (m or []) and m.__dict__.setdefault('__path__',[])", + "(p not in mp) and mp.append(p)", + ) + "lines for the namespace installer" + + _nspkg_tmpl_multi = ( + 'm and setattr(sys.modules[%(parent)r], %(child)r, m)', + ) + "additional line(s) when a parent package is indicated" + + @classmethod + def _gen_nspkg_line(cls, pkg): + # ensure pkg is not a unicode string under Python 2.7 + pkg = str(pkg) + pth = tuple(pkg.split('.')) + tmpl_lines = cls._nspkg_tmpl + parent, sep, child = pkg.rpartition('.') + if parent: + tmpl_lines += cls._nspkg_tmpl_multi + return ';'.join(tmpl_lines) % locals() + '\n' def _get_all_ns_packages(self): - nsp = {} + """Return sorted list of all package namespaces""" + nsp = set() for pkg in self.distribution.namespace_packages or []: pkg = pkg.split('.') while pkg: - nsp['.'.join(pkg)] = 1 + nsp.add('.'.join(pkg)) pkg.pop() - nsp=list(nsp) - nsp.sort() # set up shorter names first - return nsp - - + return sorted(nsp) diff --git a/awx/lib/site-packages/setuptools/command/install_lib.py b/awx/lib/site-packages/setuptools/command/install_lib.py index 82afa1421b..9b7722276b 100644 --- a/awx/lib/site-packages/setuptools/command/install_lib.py +++ b/awx/lib/site-packages/setuptools/command/install_lib.py @@ -1,21 +1,11 @@ -from distutils.command.install_lib import install_lib as _install_lib import os +import imp +from itertools import product, starmap +import distutils.command.install_lib as orig -class install_lib(_install_lib): +class install_lib(orig.install_lib): """Don't add compiled flags to filenames of non-Python files""" - def _bytecode_filenames (self, py_filenames): - bytecode_files = [] - for py_file in 
py_filenames: - if not py_file.endswith('.py'): - continue - if self.compile: - bytecode_files.append(py_file + "c") - if self.optimize > 0: - bytecode_files.append(py_file + "o") - - return bytecode_files - def run(self): self.build() outfiles = self.install() @@ -24,30 +14,81 @@ class install_lib(_install_lib): self.byte_compile(outfiles) def get_exclusions(self): - exclude = {} - nsp = self.distribution.namespace_packages + """ + Return a collections.Sized collections.Container of paths to be + excluded for single_version_externally_managed installations. + """ + all_packages = ( + pkg + for ns_pkg in self._get_SVEM_NSPs() + for pkg in self._all_packages(ns_pkg) + ) - if (nsp and self.get_finalized_command('install') - .single_version_externally_managed - ): - for pkg in nsp: - parts = pkg.split('.') - while parts: - pkgdir = os.path.join(self.install_dir, *parts) - for f in '__init__.py', '__init__.pyc', '__init__.pyo': - exclude[os.path.join(pkgdir,f)] = 1 - parts.pop() - return exclude + excl_specs = product(all_packages, self._gen_exclusion_paths()) + return set(starmap(self._exclude_pkg_path, excl_specs)) + + def _exclude_pkg_path(self, pkg, exclusion_path): + """ + Given a package name and exclusion path within that package, + compute the full exclusion path. + """ + parts = pkg.split('.') + [exclusion_path] + return os.path.join(self.install_dir, *parts) + + @staticmethod + def _all_packages(pkg_name): + """ + >>> list(install_lib._all_packages('foo.bar.baz')) + ['foo.bar.baz', 'foo.bar', 'foo'] + """ + while pkg_name: + yield pkg_name + pkg_name, sep, child = pkg_name.rpartition('.') + + def _get_SVEM_NSPs(self): + """ + Get namespace packages (list) but only for + single_version_externally_managed installations and empty otherwise. + """ + # TODO: is it necessary to short-circuit here? i.e. what's the cost + # if get_finalized_command is called even when namespace_packages is + # False? + if not self.distribution.namespace_packages: + return [] + + install_cmd = self.get_finalized_command('install') + svem = install_cmd.single_version_externally_managed + + return self.distribution.namespace_packages if svem else [] + + @staticmethod + def _gen_exclusion_paths(): + """ + Generate file paths to be excluded for namespace packages (bytecode + cache files). + """ + # always exclude the package module itself + yield '__init__.py' + + yield '__init__.pyc' + yield '__init__.pyo' + + if not hasattr(imp, 'get_tag'): + return + + base = os.path.join('__pycache__', '__init__.' 
+ imp.get_tag()) + yield base + '.pyc' + yield base + '.pyo' def copy_tree( - self, infile, outfile, - preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1 + self, infile, outfile, + preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1 ): assert preserve_mode and preserve_times and not preserve_symlinks exclude = self.get_exclusions() if not exclude: - return _install_lib.copy_tree(self, infile, outfile) + return orig.install_lib.copy_tree(self, infile, outfile) # Exclude namespace package __init__.py* files from the output @@ -58,7 +99,8 @@ class install_lib(_install_lib): def pf(src, dst): if dst in exclude: - log.warn("Skipping installation of %s (namespace package)",dst) + log.warn("Skipping installation of %s (namespace package)", + dst) return False log.info("copying %s -> %s", src, os.path.dirname(dst)) @@ -69,14 +111,8 @@ class install_lib(_install_lib): return outfiles def get_outputs(self): - outputs = _install_lib.get_outputs(self) + outputs = orig.install_lib.get_outputs(self) exclude = self.get_exclusions() if exclude: return [f for f in outputs if f not in exclude] return outputs - - - - - - diff --git a/awx/lib/site-packages/setuptools/command/install_scripts.py b/awx/lib/site-packages/setuptools/command/install_scripts.py index 105dabca6a..be66cb2252 100644 --- a/awx/lib/site-packages/setuptools/command/install_scripts.py +++ b/awx/lib/site-packages/setuptools/command/install_scripts.py @@ -1,23 +1,23 @@ -from distutils.command.install_scripts import install_scripts \ - as _install_scripts -from pkg_resources import Distribution, PathMetadata, ensure_directory -import os from distutils import log +import distutils.command.install_scripts as orig +import os -class install_scripts(_install_scripts): +from pkg_resources import Distribution, PathMetadata, ensure_directory + + +class install_scripts(orig.install_scripts): """Do normal script install, plus any egg_info wrapper scripts""" def initialize_options(self): - _install_scripts.initialize_options(self) + orig.install_scripts.initialize_options(self) self.no_ep = False def run(self): - from setuptools.command.easy_install import get_script_args - from setuptools.command.easy_install import sys_executable + import setuptools.command.easy_install as ei self.run_command("egg_info") if self.distribution.scripts: - _install_scripts.run(self) # run first to set up self.outfiles + orig.install_scripts.run(self) # run first to set up self.outfiles else: self.outfiles = [] if self.no_ep: @@ -30,16 +30,23 @@ class install_scripts(_install_scripts): ei_cmd.egg_name, ei_cmd.egg_version, ) bs_cmd = self.get_finalized_command('build_scripts') - executable = getattr(bs_cmd,'executable',sys_executable) - is_wininst = getattr( - self.get_finalized_command("bdist_wininst"), '_is_running', False - ) - for args in get_script_args(dist, executable, is_wininst): + exec_param = getattr(bs_cmd, 'executable', None) + bw_cmd = self.get_finalized_command("bdist_wininst") + is_wininst = getattr(bw_cmd, '_is_running', False) + writer = ei.ScriptWriter + if is_wininst: + exec_param = "python.exe" + writer = ei.WindowsScriptWriter + # resolve the writer to the environment + writer = writer.best() + cmd = writer.command_spec_class.best().from_param(exec_param) + for args in writer.get_args(dist, cmd.as_header()): self.write_script(*args) def write_script(self, script_name, contents, mode="t", *ignored): """Write an executable file to the scripts directory""" from setuptools.command.easy_install import chmod, current_umask + 
log.info("Installing %s script to %s", script_name, self.install_dir) target = os.path.join(self.install_dir, script_name) self.outfiles.append(target) @@ -47,8 +54,7 @@ class install_scripts(_install_scripts): mask = current_umask() if not self.dry_run: ensure_directory(target) - f = open(target,"w"+mode) + f = open(target, "w" + mode) f.write(contents) f.close() - chmod(target, 0x1FF-mask) # 0777 - + chmod(target, 0o777 - mask) diff --git a/awx/lib/site-packages/setuptools/command/launcher manifest.xml b/awx/lib/site-packages/setuptools/command/launcher manifest.xml index 844d2264cd..5972a96d8d 100644 --- a/awx/lib/site-packages/setuptools/command/launcher manifest.xml +++ b/awx/lib/site-packages/setuptools/command/launcher manifest.xml @@ -1,15 +1,15 @@ <?xml version="1.0" encoding="UTF-8" standalone="yes"?> <assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0"> -<assemblyIdentity version="1.0.0.0" - processorArchitecture="X86" - name="%(name)s" - type="win32"/> + <assemblyIdentity version="1.0.0.0" + processorArchitecture="X86" + name="%(name)s" + type="win32"/> <!-- Identify the application security requirements. --> <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3"> - <security> - <requestedPrivileges> - <requestedExecutionLevel level="asInvoker" uiAccess="false"/> - </requestedPrivileges> - </security> + <security> + <requestedPrivileges> + <requestedExecutionLevel level="asInvoker" uiAccess="false"/> + </requestedPrivileges> + </security> </trustInfo> </assembly> diff --git a/awx/lib/site-packages/setuptools/command/register.py b/awx/lib/site-packages/setuptools/command/register.py index 3b2e085907..8d6336a14d 100644 --- a/awx/lib/site-packages/setuptools/command/register.py +++ b/awx/lib/site-packages/setuptools/command/register.py @@ -1,10 +1,10 @@ -from distutils.command.register import register as _register +import distutils.command.register as orig -class register(_register): - __doc__ = _register.__doc__ + +class register(orig.register): + __doc__ = orig.register.__doc__ def run(self): # Make sure that we are using valid current name/version info self.run_command('egg_info') - _register.run(self) - + orig.register.run(self) diff --git a/awx/lib/site-packages/setuptools/command/rotate.py b/awx/lib/site-packages/setuptools/command/rotate.py index b10acfb41f..1b073620ea 100644 --- a/awx/lib/site-packages/setuptools/command/rotate.py +++ b/awx/lib/site-packages/setuptools/command/rotate.py @@ -1,18 +1,20 @@ -import distutils, os -from setuptools import Command -from setuptools.compat import basestring from distutils.util import convert_path from distutils import log -from distutils.errors import * +from distutils.errors import DistutilsOptionError +import os + +from setuptools import Command +from setuptools.compat import basestring + class rotate(Command): """Delete older distributions""" description = "delete older distributions, keeping N newest files" user_options = [ - ('match=', 'm', "patterns to match (required)"), + ('match=', 'm', "patterns to match (required)"), ('dist-dir=', 'd', "directory where the distributions are"), - ('keep=', 'k', "number of matching distributions to keep"), + ('keep=', 'k', "number of matching distributions to keep"), ] boolean_options = [] @@ -29,7 +31,7 @@ class rotate(Command): "(e.g. 
'.zip' or '.egg')" ) if self.keep is None: - raise DistutilsOptionError("Must specify number of files to keep") + raise DistutilsOptionError("Must specify number of files to keep") try: self.keep = int(self.keep) except ValueError: @@ -38,46 +40,22 @@ class rotate(Command): self.match = [ convert_path(p.strip()) for p in self.match.split(',') ] - self.set_undefined_options('bdist',('dist_dir', 'dist_dir')) + self.set_undefined_options('bdist', ('dist_dir', 'dist_dir')) def run(self): self.run_command("egg_info") from glob import glob + for pattern in self.match: - pattern = self.distribution.get_name()+'*'+pattern - files = glob(os.path.join(self.dist_dir,pattern)) - files = [(os.path.getmtime(f),f) for f in files] + pattern = self.distribution.get_name() + '*' + pattern + files = glob(os.path.join(self.dist_dir, pattern)) + files = [(os.path.getmtime(f), f) for f in files] files.sort() files.reverse() log.info("%d file(s) matching %s", len(files), pattern) files = files[self.keep:] - for (t,f) in files: + for (t, f) in files: log.info("Deleting %s", f) if not self.dry_run: os.unlink(f) - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/awx/lib/site-packages/setuptools/command/saveopts.py b/awx/lib/site-packages/setuptools/command/saveopts.py index 7209be4cd9..611cec5528 100644 --- a/awx/lib/site-packages/setuptools/command/saveopts.py +++ b/awx/lib/site-packages/setuptools/command/saveopts.py @@ -1,7 +1,6 @@ -import distutils, os -from setuptools import Command from setuptools.command.setopt import edit_config, option_base + class saveopts(option_base): """Save command-line options to a file""" @@ -13,12 +12,11 @@ class saveopts(option_base): for cmd in dist.command_options: - if cmd=='saveopts': - continue # don't save our own options! + if cmd == 'saveopts': + continue # don't save our own options! - for opt,(src,val) in dist.get_option_dict(cmd).items(): - if src=="command line": - settings.setdefault(cmd,{})[opt] = val + for opt, (src, val) in dist.get_option_dict(cmd).items(): + if src == "command line": + settings.setdefault(cmd, {})[opt] = val edit_config(self.filename, settings, self.dry_run) - diff --git a/awx/lib/site-packages/setuptools/command/sdist.py b/awx/lib/site-packages/setuptools/command/sdist.py index 76e1c5f18b..851a177524 100644 --- a/awx/lib/site-packages/setuptools/command/sdist.py +++ b/awx/lib/site-packages/setuptools/command/sdist.py @@ -1,16 +1,17 @@ -import os -import re -import sys from glob import glob +from distutils import log +import distutils.command.sdist as orig +import os +import sys + +from setuptools.compat import PY3 +from setuptools.utils import cs_path_exists import pkg_resources -from distutils.command.sdist import sdist as _sdist -from distutils.util import convert_path -from distutils import log -from setuptools import svn_utils -READMES = ('README', 'README.rst', 'README.txt') +READMES = 'README', 'README.rst', 'README.txt' +_default_revctrl = list def walk_revctrl(dirname=''): """Find all files under revision control""" @@ -19,60 +20,7 @@ def walk_revctrl(dirname=''): yield item -#TODO will need test case -class re_finder(object): - """ - Finder that locates files based on entries in a file matched by a - regular expression. 
- """ - - def __init__(self, path, pattern, postproc=lambda x: x): - self.pattern = pattern - self.postproc = postproc - self.entries_path = convert_path(path) - - def _finder(self, dirname, filename): - f = open(filename,'rU') - try: - data = f.read() - finally: - f.close() - for match in self.pattern.finditer(data): - path = match.group(1) - # postproc was formerly used when the svn finder - # was an re_finder for calling unescape - path = self.postproc(path) - yield svn_utils.joinpath(dirname, path) - - def find(self, dirname=''): - path = svn_utils.joinpath(dirname, self.entries_path) - - if not os.path.isfile(path): - # entries file doesn't exist - return - for path in self._finder(dirname,path): - if os.path.isfile(path): - yield path - elif os.path.isdir(path): - for item in self.find(path): - yield item - __call__ = find - - -def _default_revctrl(dirname=''): - 'Primary svn_cvs entry point' - for finder in finders: - for item in finder(dirname): - yield item - - -finders = [ - re_finder('CVS/Entries', re.compile(r"^\w?/([^/]+)/", re.M)), - svn_utils.svn_finder, -] - - -class sdist(_sdist): +class sdist(orig.sdist): """Smart sdist that finds anything supported by revision control""" user_options = [ @@ -84,7 +32,7 @@ class sdist(_sdist): ('dist-dir=', 'd', "directory to put the source distribution archive(s) in " "[default: dist]"), - ] + ] negative_opt = {} @@ -92,7 +40,7 @@ class sdist(_sdist): self.run_command('egg_info') ei_cmd = self.get_finalized_command('egg_info') self.filelist = ei_cmd.filelist - self.filelist.append(os.path.join(ei_cmd.egg_info,'SOURCES.txt')) + self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt')) self.check_readme() # Run sub commands @@ -102,12 +50,13 @@ class sdist(_sdist): # Call check_metadata only if no 'check' command # (distutils <= 2.6) import distutils.command + if 'check' not in distutils.command.__all__: self.check_metadata() self.make_distribution() - dist_files = getattr(self.distribution,'dist_files',[]) + dist_files = getattr(self.distribution, 'dist_files', []) for file in self.archive_files: data = ('sdist', '', file) if data not in dist_files: @@ -119,17 +68,19 @@ class sdist(_sdist): # Doing so prevents an error when easy_install attempts to delete the # file. try: - _sdist.read_template(self) + orig.sdist.read_template(self) except: - sys.exc_info()[2].tb_next.tb_frame.f_locals['template'].close() + _, _, tb = sys.exc_info() + tb.tb_next.tb_frame.f_locals['template'].close() raise + # Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle # has been fixed, so only override the method if we're using an earlier # Python. 
has_leaky_handle = ( - sys.version_info < (2,7,2) - or (3,0) <= sys.version_info < (3,1,4) - or (3,2) <= sys.version_info < (3,2,1) + sys.version_info < (2, 7, 2) + or (3, 0) <= sys.version_info < (3, 1, 4) + or (3, 2) <= sys.version_info < (3, 2, 1) ) if has_leaky_handle: read_template = __read_template_hack @@ -142,7 +93,7 @@ class sdist(_sdist): alts = fn got_it = 0 for fn in alts: - if os.path.exists(fn): + if cs_path_exists(fn): got_it = 1 self.filelist.append(fn) break @@ -151,14 +102,14 @@ class sdist(_sdist): self.warn("standard file not found: should have one of " + ', '.join(alts)) else: - if os.path.exists(fn): + if cs_path_exists(fn): self.filelist.append(fn) else: self.warn("standard file '%s' not found" % fn) optional = ['test/test*.py', 'setup.cfg'] for pattern in optional: - files = list(filter(os.path.isfile, glob(pattern))) + files = list(filter(cs_path_exists, glob(pattern))) if files: self.filelist.extend(files) @@ -193,15 +144,16 @@ class sdist(_sdist): return else: self.warn( - "standard file not found: should have one of " +', '.join(READMES) + "standard file not found: should have one of " + + ', '.join(READMES) ) def make_release_tree(self, base_dir, files): - _sdist.make_release_tree(self, base_dir, files) + orig.sdist.make_release_tree(self, base_dir, files) # Save any egg_info command line options used to create this sdist dest = os.path.join(base_dir, 'setup.cfg') - if hasattr(os,'link') and os.path.exists(dest): + if hasattr(os, 'link') and os.path.exists(dest): # unlink and re-copy, since it might be hard-linked, and # we don't want to change the source version os.unlink(dest) @@ -219,7 +171,8 @@ class sdist(_sdist): first_line = fp.readline() finally: fp.close() - return first_line != '# file GENERATED by distutils, do NOT edit\n'.encode() + return (first_line != + '# file GENERATED by distutils, do NOT edit\n'.encode()) def read_manifest(self): """Read the manifest file (named by 'self.manifest') and use it to @@ -230,7 +183,7 @@ class sdist(_sdist): manifest = open(self.manifest, 'rbU') for line in manifest: # The manifest must contain UTF-8. See #303. - if sys.version_info >= (3,): + if PY3: try: line = line.decode('UTF-8') except UnicodeDecodeError: diff --git a/awx/lib/site-packages/setuptools/command/setopt.py b/awx/lib/site-packages/setuptools/command/setopt.py index aa468c88fe..a04d6032ad 100644 --- a/awx/lib/site-packages/setuptools/command/setopt.py +++ b/awx/lib/site-packages/setuptools/command/setopt.py @@ -1,8 +1,11 @@ -import distutils, os -from setuptools import Command from distutils.util import convert_path from distutils import log -from distutils.errors import * +from distutils.errors import DistutilsOptionError +import distutils +import os + +from setuptools import Command + __all__ = ['config_file', 'edit_config', 'option_base', 'setopt'] @@ -12,33 +15,20 @@ def config_file(kind="local"): `kind` must be one of "local", "global", or "user" """ - if kind=='local': + if kind == 'local': return 'setup.cfg' - if kind=='global': + if kind == 'global': return os.path.join( - os.path.dirname(distutils.__file__),'distutils.cfg' + os.path.dirname(distutils.__file__), 'distutils.cfg' ) - if kind=='user': - dot = os.name=='posix' and '.' or '' + if kind == 'user': + dot = os.name == 'posix' and '.' 
or '' return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot)) raise ValueError( "config_file() type must be 'local', 'global', or 'user'", kind ) - - - - - - - - - - - - - def edit_config(filename, settings, dry_run=False): """Edit a configuration file to include `settings` @@ -48,6 +38,7 @@ def edit_config(filename, settings, dry_run=False): A setting of ``None`` means to delete that setting. """ from setuptools.compat import ConfigParser + log.debug("Reading configuration from %s", filename) opts = ConfigParser.RawConfigParser() opts.read([filename]) @@ -59,46 +50,49 @@ def edit_config(filename, settings, dry_run=False): if not opts.has_section(section): log.debug("Adding new section [%s] to %s", section, filename) opts.add_section(section) - for option,value in options.items(): + for option, value in options.items(): if value is None: - log.debug("Deleting %s.%s from %s", + log.debug( + "Deleting %s.%s from %s", section, option, filename ) - opts.remove_option(section,option) + opts.remove_option(section, option) if not opts.options(section): log.info("Deleting empty [%s] section from %s", - section, filename) + section, filename) opts.remove_section(section) else: log.debug( "Setting %s.%s to %r in %s", section, option, value, filename ) - opts.set(section,option,value) + opts.set(section, option, value) log.info("Writing %s", filename) if not dry_run: - f = open(filename,'w'); opts.write(f); f.close() + with open(filename, 'w') as f: + opts.write(f) + class option_base(Command): """Abstract base class for commands that mess with config files""" - + user_options = [ ('global-config', 'g', - "save options to the site-wide distutils.cfg file"), + "save options to the site-wide distutils.cfg file"), ('user-config', 'u', - "save options to the current user's pydistutils.cfg file"), + "save options to the current user's pydistutils.cfg file"), ('filename=', 'f', - "configuration file to use (default=setup.cfg)"), + "configuration file to use (default=setup.cfg)"), ] boolean_options = [ 'global-config', 'user-config', - ] + ] def initialize_options(self): self.global_config = None - self.user_config = None + self.user_config = None self.filename = None def finalize_options(self): @@ -111,14 +105,12 @@ class option_base(Command): filenames.append(self.filename) if not filenames: filenames.append(config_file('local')) - if len(filenames)>1: + if len(filenames) > 1: raise DistutilsOptionError( "Must specify only one configuration file option", filenames ) - self.filename, = filenames - - + self.filename, = filenames class setopt(option_base): @@ -128,9 +120,9 @@ class setopt(option_base): user_options = [ ('command=', 'c', 'command to set an option for'), - ('option=', 'o', 'option to set'), - ('set-value=', 's', 'value of the option'), - ('remove', 'r', 'remove (unset) the value'), + ('option=', 'o', 'option to set'), + ('set-value=', 's', 'value of the option'), + ('remove', 'r', 'remove (unset) the value'), ] + option_base.user_options boolean_options = option_base.boolean_options + ['remove'] @@ -152,13 +144,7 @@ class setopt(option_base): def run(self): edit_config( self.filename, { - self.command: {self.option.replace('-','_'):self.set_value} + self.command: {self.option.replace('-', '_'): self.set_value} }, self.dry_run ) - - - - - - diff --git a/awx/lib/site-packages/setuptools/command/test.py b/awx/lib/site-packages/setuptools/command/test.py index db2fc7b140..42689f7012 100644 --- a/awx/lib/site-packages/setuptools/command/test.py +++ 
b/awx/lib/site-packages/setuptools/command/test.py @@ -1,12 +1,17 @@ -from setuptools import Command from distutils.errors import DistutilsOptionError +from unittest import TestLoader +import unittest import sys -from pkg_resources import * -from pkg_resources import _namespace_packages -from unittest import TestLoader, main + +from pkg_resources import (resource_listdir, resource_exists, normalize_path, + working_set, _namespace_packages, + add_activation_listener, require, EntryPoint) +from setuptools import Command +from setuptools.compat import PY3 +from setuptools.py31compat import unittest_main + class ScanningLoader(TestLoader): - def loadTestsFromModule(self, module): """Return a suite of all tests cases contained in the given module @@ -15,48 +20,45 @@ class ScanningLoader(TestLoader): the return value to the tests. """ tests = [] - if module.__name__!='setuptools.tests.doctest': # ugh - tests.append(TestLoader.loadTestsFromModule(self,module)) + tests.append(TestLoader.loadTestsFromModule(self, module)) if hasattr(module, "additional_tests"): tests.append(module.additional_tests()) if hasattr(module, '__path__'): for file in resource_listdir(module.__name__, ''): - if file.endswith('.py') and file!='__init__.py': - submodule = module.__name__+'.'+file[:-3] + if file.endswith('.py') and file != '__init__.py': + submodule = module.__name__ + '.' + file[:-3] else: - if resource_exists( - module.__name__, file+'/__init__.py' - ): - submodule = module.__name__+'.'+file + if resource_exists(module.__name__, file + '/__init__.py'): + submodule = module.__name__ + '.' + file else: continue tests.append(self.loadTestsFromName(submodule)) - if len(tests)!=1: + if len(tests) != 1: return self.suiteClass(tests) else: - return tests[0] # don't create a nested suite for only one return + return tests[0] # don't create a nested suite for only one return class test(Command): - """Command to run unit tests after in-place build""" description = "run unit tests after in-place build" user_options = [ - ('test-module=','m', "Run 'test_suite' in specified module"), - ('test-suite=','s', - "Test suite to run (e.g. 'some_module.test_suite')"), + ('test-module=', 'm', "Run 'test_suite' in specified module"), + ('test-suite=', 's', + "Test suite to run (e.g. 
'some_module.test_suite')"), + ('test-runner=', 'r', "Test runner to use"), ] def initialize_options(self): self.test_suite = None self.test_module = None self.test_loader = None - + self.test_runner = None def finalize_options(self): @@ -64,7 +66,7 @@ class test(Command): if self.test_module is None: self.test_suite = self.distribution.test_suite else: - self.test_suite = self.test_module+".test_suite" + self.test_suite = self.test_module + ".test_suite" elif self.test_module: raise DistutilsOptionError( "You may specify a module or a suite, but not both" @@ -73,16 +75,18 @@ class test(Command): self.test_args = [self.test_suite] if self.verbose: - self.test_args.insert(0,'--verbose') + self.test_args.insert(0, '--verbose') if self.test_loader is None: - self.test_loader = getattr(self.distribution,'test_loader',None) + self.test_loader = getattr(self.distribution, 'test_loader', None) if self.test_loader is None: self.test_loader = "setuptools.command.test:ScanningLoader" - - + if self.test_runner is None: + self.test_runner = getattr(self.distribution, 'test_runner', None) def with_project_on_sys_path(self, func): - if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False): + with_2to3 = PY3 and getattr(self.distribution, 'use_2to3', False) + + if with_2to3: # If we run 2to3 we can not do this inplace: # Ensure metadata is up-to-date @@ -122,10 +126,10 @@ class test(Command): sys.modules.update(old_modules) working_set.__init__() - def run(self): if self.distribution.install_requires: - self.distribution.fetch_build_eggs(self.distribution.install_requires) + self.distribution.fetch_build_eggs( + self.distribution.install_requires) if self.distribution.tests_require: self.distribution.fetch_build_eggs(self.distribution.tests_require) @@ -137,14 +141,11 @@ class test(Command): self.announce('running "unittest %s"' % cmd) self.with_project_on_sys_path(self.run_tests) - def run_tests(self): - import unittest - # Purge modules under test from sys.modules. The test loader will # re-import them from the build location. Required when 2to3 is used # with namespace packages. - if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False): + if PY3 and getattr(self.distribution, 'use_2to3', False): module = self.test_args[-1].split('.')[0] if module in _namespace_packages: del_modules = [] @@ -156,43 +157,19 @@ class test(Command): del_modules.append(name) list(map(sys.modules.__delitem__, del_modules)) - loader_ep = EntryPoint.parse("x="+self.test_loader) - loader_class = loader_ep.load(require=False) - cks = loader_class() - unittest.main( - None, None, [unittest.__file__]+self.test_args, - testLoader = cks + unittest_main( + None, None, [unittest.__file__] + self.test_args, + testLoader=self._resolve_as_ep(self.test_loader), + testRunner=self._resolve_as_ep(self.test_runner), ) - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + @staticmethod + def _resolve_as_ep(val): + """ + Load the indicated attribute value, called, as a as if it were + specified as an entry point. 
+ """ + if val is None: + return + parsed = EntryPoint.parse("x=" + val) + return parsed.resolve()() diff --git a/awx/lib/site-packages/setuptools/command/upload_docs.py b/awx/lib/site-packages/setuptools/command/upload_docs.py index cad7a52d1b..001ee936e4 100644 --- a/awx/lib/site-packages/setuptools/command/upload_docs.py +++ b/awx/lib/site-packages/setuptools/command/upload_docs.py @@ -5,6 +5,10 @@ Implements a Distutils 'upload_docs' subcommand (upload documentation to PyPI's pythonhosted.org). """ +from base64 import standard_b64encode +from distutils import log +from distutils.errors import DistutilsOptionError +from distutils.command.upload import upload import os import socket import zipfile @@ -12,14 +16,9 @@ import tempfile import sys import shutil -from base64 import standard_b64encode +from setuptools.compat import httplib, urlparse, unicode, iteritems, PY3 from pkg_resources import iter_entry_points -from distutils import log -from distutils.errors import DistutilsOptionError -from distutils.command.upload import upload - -from setuptools.compat import httplib, urlparse, unicode, iteritems, PY3 errors = 'surrogateescape' if PY3 else 'strict' @@ -33,7 +32,6 @@ def b(s, encoding='utf-8'): class upload_docs(upload): - description = 'Upload documentation to PyPI' user_options = [ @@ -42,7 +40,7 @@ class upload_docs(upload): ('show-response', None, 'display full response text from server'), ('upload-dir=', None, 'directory to upload'), - ] + ] boolean_options = upload.boolean_options def has_sphinx(self): @@ -159,7 +157,7 @@ class upload_docs(upload): elif schema == 'https': conn = httplib.HTTPSConnection(netloc) else: - raise AssertionError("unsupported schema "+schema) + raise AssertionError("unsupported schema " + schema) data = '' try: @@ -171,8 +169,7 @@ class upload_docs(upload): conn.putheader('Authorization', auth) conn.endheaders() conn.send(body) - except socket.error: - e = sys.exc_info()[1] + except socket.error as e: self.announce(str(e), log.ERROR) return @@ -190,4 +187,4 @@ class upload_docs(upload): self.announce('Upload failed (%s): %s' % (r.status, r.reason), log.ERROR) if self.show_response: - print('-'*75, r.read(), '-'*75) + print('-' * 75, r.read(), '-' * 75) diff --git a/awx/lib/site-packages/setuptools/compat.py b/awx/lib/site-packages/setuptools/compat.py index 7b824ba2ff..73e6e4aa7e 100644 --- a/awx/lib/site-packages/setuptools/compat.py +++ b/awx/lib/site-packages/setuptools/compat.py @@ -1,15 +1,15 @@ import sys import itertools -if sys.version_info[0] < 3: - PY3 = False +PY3 = sys.version_info >= (3,) +PY2 = not PY3 +if PY2: basestring = basestring import __builtin__ as builtins import ConfigParser from StringIO import StringIO BytesIO = StringIO - execfile = execfile func_code = lambda o: o.func_code func_globals = lambda o: o.func_globals im_func = lambda o: o.im_func @@ -21,8 +21,6 @@ if sys.version_info[0] < 3: iteritems = lambda o: o.iteritems() long_type = long maxsize = sys.maxint - next = lambda o: o.next() - numeric_types = (int, long, float) unichr = unichr unicode = unicode bytes = str @@ -34,9 +32,8 @@ if sys.version_info[0] < 3: exec("""def reraise(tp, value, tb=None): raise tp, value, tb""") -else: - PY3 = True +if PY3: basestring = str import builtins import configparser as ConfigParser @@ -51,8 +48,6 @@ else: iteritems = lambda o: o.items() long_type = int maxsize = sys.maxsize - next = next - numeric_types = (int, float) unichr = chr unicode = str bytes = bytes @@ -65,18 +60,6 @@ else: ) filterfalse = itertools.filterfalse - def 
execfile(fn, globs=None, locs=None): - if globs is None: - globs = globals() - if locs is None: - locs = globs - f = open(fn, 'rb') - try: - source = f.read() - finally: - f.close() - exec(compile(source, fn, 'exec'), globs, locs) - def reraise(tp, value, tb=None): if value.__traceback__ is not tb: raise value.with_traceback(tb) diff --git a/awx/lib/site-packages/setuptools/depends.py b/awx/lib/site-packages/setuptools/depends.py index 8b9d1217b1..e87ef3f39c 100644 --- a/awx/lib/site-packages/setuptools/depends.py +++ b/awx/lib/site-packages/setuptools/depends.py @@ -1,7 +1,9 @@ -from __future__ import generators -import sys, imp, marshal +import sys +import imp +import marshal from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN -from distutils.version import StrictVersion, LooseVersion +from distutils.version import StrictVersion +from setuptools import compat __all__ = [ 'Require', 'find_module', 'get_module_constant', 'extract_constant' @@ -10,9 +12,8 @@ __all__ = [ class Require: """A prerequisite to building or installing a distribution""" - def __init__(self,name,requested_version,module,homepage='', - attribute=None,format=None - ): + def __init__(self, name, requested_version, module, homepage='', + attribute=None, format=None): if format is None and requested_version is not None: format = StrictVersion @@ -25,20 +26,17 @@ class Require: self.__dict__.update(locals()) del self.self - def full_name(self): """Return full package/distribution name, w/version""" if self.requested_version is not None: return '%s-%s' % (self.name,self.requested_version) return self.name - - def version_ok(self,version): + def version_ok(self, version): """Is 'version' sufficiently up-to-date?""" return self.attribute is None or self.format is None or \ str(version) != "unknown" and version >= self.requested_version - def get_version(self, paths=None, default="unknown"): """Get version number of installed module, 'None', or 'default' @@ -59,20 +57,18 @@ class Require: except ImportError: return None - v = get_module_constant(self.module,self.attribute,default,paths) + v = get_module_constant(self.module, self.attribute, default, paths) if v is not None and v is not default and self.format is not None: return self.format(v) return v - - def is_present(self,paths=None): + def is_present(self, paths=None): """Return true if dependency is present on 'paths'""" return self.get_version(paths) is not None - - def is_current(self,paths=None): + def is_current(self, paths=None): """Return true if dependency is present and up-to-date on 'paths'""" version = self.get_version(paths) if version is None: @@ -103,7 +99,7 @@ def _iter_code(code): ptr += 3 if op==EXTENDED_ARG: - extended_arg = arg * long_type(65536) + extended_arg = arg * compat.long_type(65536) continue else: @@ -113,14 +109,6 @@ def _iter_code(code): yield op,arg - - - - - - - - def find_module(module, paths=None): """Just like 'imp.find_module()', but with package support""" @@ -140,28 +128,6 @@ def find_module(module, paths=None): return info - - - - - - - - - - - - - - - - - - - - - - def get_module_constant(module, symbol, default=-1, paths=None): """Find 'module' by searching 'paths', and extract 'symbol' @@ -171,7 +137,7 @@ def get_module_constant(module, symbol, default=-1, paths=None): constant. 
Otherwise, return 'default'.""" try: - f, path, (suffix,mode,kind) = find_module(module,paths) + f, path, (suffix, mode, kind) = find_module(module, paths) except ImportError: # Module doesn't exist return None @@ -187,23 +153,17 @@ def get_module_constant(module, symbol, default=-1, paths=None): else: # Not something we can parse; we'll have to import it. :( if module not in sys.modules: - imp.load_module(module,f,path,(suffix,mode,kind)) - return getattr(sys.modules[module],symbol,None) + imp.load_module(module, f, path, (suffix, mode, kind)) + return getattr(sys.modules[module], symbol, None) finally: if f: f.close() - return extract_constant(code,symbol,default) + return extract_constant(code, symbol, default) - - - - - - -def extract_constant(code,symbol,default=-1): +def extract_constant(code, symbol, default=-1): """Extract the constant value of 'symbol' from 'code' If the name 'symbol' is bound to a constant value by the Python code @@ -236,11 +196,20 @@ def extract_constant(code,symbol,default=-1): return const else: const = default - -if sys.platform.startswith('java') or sys.platform == 'cli': - # XXX it'd be better to test assertions about bytecode instead... - del extract_constant, get_module_constant - __all__.remove('extract_constant') - __all__.remove('get_module_constant') +def _update_globals(): + """ + Patch the globals to remove the objects not available on some platforms. + + XXX it'd be better to test assertions about bytecode instead. + """ + + if not sys.platform.startswith('java') and sys.platform != 'cli': + return + incompatible = 'extract_constant', 'get_module_constant' + for name in incompatible: + del globals()[name] + __all__.remove(name) + +_update_globals() diff --git a/awx/lib/site-packages/setuptools/dist.py b/awx/lib/site-packages/setuptools/dist.py index 0801ae74ff..bc29b131b8 100644 --- a/awx/lib/site-packages/setuptools/dist.py +++ b/awx/lib/site-packages/setuptools/dist.py @@ -4,17 +4,23 @@ import re import os import sys import warnings +import numbers import distutils.log import distutils.core import distutils.cmd +import distutils.dist from distutils.core import Distribution as _Distribution from distutils.errors import (DistutilsOptionError, DistutilsPlatformError, DistutilsSetupError) from setuptools.depends import Require -from setuptools.compat import numeric_types, basestring +from setuptools.compat import basestring, PY2 +from setuptools import windows_support import pkg_resources +packaging = pkg_resources.packaging + + def _get_unpatched(cls): """Protect against re-patching the distutils if reloaded @@ -31,6 +37,27 @@ def _get_unpatched(cls): _Distribution = _get_unpatched(_Distribution) +def _patch_distribution_metadata_write_pkg_info(): + """ + Workaround issue #197 - Python 3 prior to 3.2.2 uses an environment-local + encoding to save the pkg_info. Monkey-patch its write_pkg_info method to + correct this undesirable behavior. + """ + environment_local = (3,) <= sys.version_info[:3] < (3, 2, 2) + if not environment_local: + return + + # from Python 3.4 + def write_pkg_info(self, base_dir): + """Write the PKG-INFO file into the release tree. 
+ """ + with open(os.path.join(base_dir, 'PKG-INFO'), 'w', + encoding='UTF-8') as pkg_info: + self.write_pkg_file(pkg_info) + + distutils.dist.DistributionMetadata.write_pkg_info = write_pkg_info +_patch_distribution_metadata_write_pkg_info() + sequence = tuple, list def check_importable(dist, attr, value): @@ -104,8 +131,7 @@ def check_entry_points(dist, attr, value): """Verify that entry_points map is parseable""" try: pkg_resources.EntryPoint.parse_map(value) - except ValueError: - e = sys.exc_info()[1] + except ValueError as e: raise DistutilsSetupError(e) def check_test_suite(dist, attr, value): @@ -236,15 +262,36 @@ class Distribution(_Distribution): self.dependency_links = attrs.pop('dependency_links', []) assert_string_list(self,'dependency_links',self.dependency_links) if attrs and 'setup_requires' in attrs: - self.fetch_build_eggs(attrs.pop('setup_requires')) + self.fetch_build_eggs(attrs['setup_requires']) for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): if not hasattr(self,ep.name): setattr(self,ep.name,None) _Distribution.__init__(self,attrs) - if isinstance(self.metadata.version, numeric_types): + if isinstance(self.metadata.version, numbers.Number): # Some people apparently take "version number" too literally :) self.metadata.version = str(self.metadata.version) + if self.metadata.version is not None: + try: + ver = packaging.version.Version(self.metadata.version) + normalized_version = str(ver) + if self.metadata.version != normalized_version: + warnings.warn( + "The version specified requires normalization, " + "consider using '%s' instead of '%s'." % ( + normalized_version, + self.metadata.version, + ) + ) + self.metadata.version = normalized_version + except (packaging.version.InvalidVersion, TypeError): + warnings.warn( + "The version specified (%r) is an invalid version, this " + "may not work as expected with newer versions of " + "setuptools, pip, and PyPI. Please see PEP 440 for more " + "details." 
% self.metadata.version + ) + def parse_command_line(self): """Process features after parsing command line options""" result = _Distribution.parse_command_line(self) @@ -258,12 +305,13 @@ class Distribution(_Distribution): def fetch_build_eggs(self, requires): """Resolve pre-setup requirements""" - from pkg_resources import working_set, parse_requirements - for dist in working_set.resolve( - parse_requirements(requires), installer=self.fetch_build_egg, - replace_conflicting=True - ): - working_set.add(dist, replace=True) + resolved_dists = pkg_resources.working_set.resolve( + pkg_resources.parse_requirements(requires), + installer=self.fetch_build_egg, + replace_conflicting=True, + ) + for dist in resolved_dists: + pkg_resources.working_set.add(dist, replace=True) def finalize_options(self): _Distribution.finalize_options(self) @@ -281,6 +329,21 @@ class Distribution(_Distribution): else: self.convert_2to3_doctests = [] + def get_egg_cache_dir(self): + egg_cache_dir = os.path.join(os.curdir, '.eggs') + if not os.path.exists(egg_cache_dir): + os.mkdir(egg_cache_dir) + windows_support.hide_file(egg_cache_dir) + readme_txt_filename = os.path.join(egg_cache_dir, 'README.txt') + with open(readme_txt_filename, 'w') as f: + f.write('This directory contains eggs that were downloaded ' + 'by setuptools to build, test, and run plug-ins.\n\n') + f.write('This directory caches those eggs to prevent ' + 'repeated downloads.\n\n') + f.write('However, it is safe to delete this directory.\n\n') + + return egg_cache_dir + def fetch_build_egg(self, req): """Fetch an egg needed for building""" @@ -304,8 +367,9 @@ class Distribution(_Distribution): if 'find_links' in opts: links = opts['find_links'][1].split() + links opts['find_links'] = ('setup', links) + install_dir = self.get_egg_cache_dir() cmd = easy_install( - dist, args=["x"], install_dir=os.curdir, exclude_scripts=True, + dist, args=["x"], install_dir=install_dir, exclude_scripts=True, always_copy=False, build_directory=None, editable=False, upgrade=False, multi_version=True, no_report=True, user=False ) @@ -369,7 +433,8 @@ class Distribution(_Distribution): def print_commands(self): for ep in pkg_resources.iter_entry_points('distutils.commands'): if ep.name not in self.cmdclass: - cmdclass = ep.load(False) # don't require extras, we're not running + # don't require extras as the commands won't be invoked + cmdclass = ep.resolve() self.cmdclass[ep.name] = cmdclass return _Distribution.print_commands(self) @@ -608,7 +673,7 @@ class Distribution(_Distribution): """ import sys - if sys.version_info < (3,) or self.help_commands: + if PY2 or self.help_commands: return _Distribution.handle_display_options(self, option_order) # Stdout may be StringIO (e.g. in tests) diff --git a/awx/lib/site-packages/setuptools/extension.py b/awx/lib/site-packages/setuptools/extension.py index d7892d3d9f..8178ed33d7 100644 --- a/awx/lib/site-packages/setuptools/extension.py +++ b/awx/lib/site-packages/setuptools/extension.py @@ -1,11 +1,17 @@ import sys +import re +import functools import distutils.core +import distutils.errors import distutils.extension -from setuptools.dist import _get_unpatched +from .dist import _get_unpatched +from . import msvc9_support _Extension = _get_unpatched(distutils.core.Extension) +msvc9_support.patch_for_specialized_compiler() + def have_pyrex(): """ Return True if Cython or Pyrex can be imported. 
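Worth noting in the dist.py changes above: versions are now run through PEP 440 normalization, with one warning when the spelling changes and another when the version cannot be parsed at all. A standalone sketch of that check, using the separately distributed packaging library rather than the copy vendored under pkg_resources:

    import warnings
    from packaging.version import Version, InvalidVersion

    def normalize(version):
        """Return the PEP 440 normal form of 'version', warning as dist.py does."""
        try:
            normalized = str(Version(version))
        except InvalidVersion:
            warnings.warn('%r is not a valid PEP 440 version' % version)
            return version
        if normalized != version:
            warnings.warn('consider using %r instead of %r'
                          % (normalized, version))
        return normalized

    print(normalize('1.0'))      # '1.0', no warning
    print(normalize('1.0-dev'))  # '1.0.dev0', with a normalization warning
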
@@ -26,16 +32,21 @@ class Extension(_Extension): def __init__(self, *args, **kw): _Extension.__init__(self, *args, **kw) - if not have_pyrex(): - self._convert_pyx_sources_to_c() + self._convert_pyx_sources_to_lang() - def _convert_pyx_sources_to_c(self): - "convert .pyx extensions to .c" - def pyx_to_c(source): - if source.endswith('.pyx'): - source = source[:-4] + '.c' - return source - self.sources = list(map(pyx_to_c, self.sources)) + def _convert_pyx_sources_to_lang(self): + """ + Replace sources with .pyx extensions to sources with the target + language extension. This mechanism allows language authors to supply + pre-converted sources but to prefer the .pyx sources. + """ + if have_pyrex(): + # the build has Cython, so allow it to compile the .pyx files + return + lang = self.language or '' + target_ext = '.cpp' if lang.lower() == 'c++' else '.c' + sub = functools.partial(re.sub, '.pyx$', target_ext) + self.sources = list(map(sub, self.sources)) class Library(Extension): """Just like a regular Extension, but built as a library instead""" diff --git a/awx/lib/site-packages/setuptools/msvc9_support.py b/awx/lib/site-packages/setuptools/msvc9_support.py new file mode 100644 index 0000000000..a69c7474c8 --- /dev/null +++ b/awx/lib/site-packages/setuptools/msvc9_support.py @@ -0,0 +1,63 @@ +try: + import distutils.msvc9compiler +except ImportError: + pass + +unpatched = dict() + +def patch_for_specialized_compiler(): + """ + Patch functions in distutils.msvc9compiler to use the standalone compiler + build for Python (Windows only). Fall back to original behavior when the + standalone compiler is not available. + """ + if 'distutils' not in globals(): + # The module isn't available to be patched + return + + if unpatched: + # Already patched + return + + unpatched.update(vars(distutils.msvc9compiler)) + + distutils.msvc9compiler.find_vcvarsall = find_vcvarsall + distutils.msvc9compiler.query_vcvarsall = query_vcvarsall + +def find_vcvarsall(version): + Reg = distutils.msvc9compiler.Reg + VC_BASE = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f' + key = VC_BASE % ('', version) + try: + # Per-user installs register the compiler path here + productdir = Reg.get_value(key, "installdir") + except KeyError: + try: + # All-user installs on a 64-bit system register here + key = VC_BASE % ('Wow6432Node\\', version) + productdir = Reg.get_value(key, "installdir") + except KeyError: + productdir = None + + if productdir: + import os + vcvarsall = os.path.join(productdir, "vcvarsall.bat") + if os.path.isfile(vcvarsall): + return vcvarsall + + return unpatched['find_vcvarsall'](version) + +def query_vcvarsall(version, *args, **kwargs): + try: + return unpatched['query_vcvarsall'](version, *args, **kwargs) + except distutils.errors.DistutilsPlatformError as exc: + if exc and "vcvarsall.bat" in exc.args[0]: + message = 'Microsoft Visual C++ %0.1f is required (%s).' % (version, exc.args[0]) + if int(version) == 9: + # This redirection link is maintained by Microsoft. + # Contact vspython@microsoft.com if it needs updating. 
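The new msvc9_support module above keeps the original distutils functions in an `unpatched` dict so the replacements can always fall back to stock behavior. The stash-then-delegate pattern in isolation, with math.sqrt standing in for the patched compiler hooks:

    import math

    unpatched = {}

    def logged_sqrt(x):
        print('sqrt(%r)' % (x,))
        return unpatched['sqrt'](x)   # delegate to the original

    def patch_sqrt():
        if unpatched:
            return                    # already patched; never re-wrap
        unpatched['sqrt'] = math.sqrt
        math.sqrt = logged_sqrt

    patch_sqrt()
    patch_sqrt()                      # a second call is harmless
    print(math.sqrt(9.0))             # logs the call, then prints 3.0
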
+ raise distutils.errors.DistutilsPlatformError( + message + ' Get it from http://aka.ms/vcpython27' + ) + raise distutils.errors.DistutilsPlatformError(message) + raise diff --git a/awx/lib/site-packages/setuptools/package_index.py b/awx/lib/site-packages/setuptools/package_index.py index 167c34e5b4..5ed19130d7 100644 --- a/awx/lib/site-packages/setuptools/package_index.py +++ b/awx/lib/site-packages/setuptools/package_index.py @@ -632,16 +632,15 @@ class PackageIndex(Environment): shutil.copy2(filename, dst) filename=dst - file = open(os.path.join(tmpdir, 'setup.py'), 'w') - file.write( - "from setuptools import setup\n" - "setup(name=%r, version=%r, py_modules=[%r])\n" - % ( - dists[0].project_name, dists[0].version, - os.path.splitext(basename)[0] + with open(os.path.join(tmpdir, 'setup.py'), 'w') as file: + file.write( + "from setuptools import setup\n" + "setup(name=%r, version=%r, py_modules=[%r])\n" + % ( + dists[0].project_name, dists[0].version, + os.path.splitext(basename)[0] + ) ) - ) - file.close() return filename elif match: @@ -660,7 +659,7 @@ class PackageIndex(Environment): def _download_to(self, url, filename): self.info("Downloading %s", url) # Download the file - fp, tfp, info = None, None, None + fp, info = None, None try: checker = HashChecker.from_url(url) fp = self.open_url(strip_fragment(url)) @@ -677,21 +676,20 @@ class PackageIndex(Environment): sizes = get_all_headers(headers, 'Content-Length') size = max(map(int, sizes)) self.reporthook(url, filename, blocknum, bs, size) - tfp = open(filename,'wb') - while True: - block = fp.read(bs) - if block: - checker.feed(block) - tfp.write(block) - blocknum += 1 - self.reporthook(url, filename, blocknum, bs, size) - else: - break - self.check_hash(checker, filename, tfp) + with open(filename,'wb') as tfp: + while True: + block = fp.read(bs) + if block: + checker.feed(block) + tfp.write(block) + blocknum += 1 + self.reporthook(url, filename, blocknum, bs, size) + else: + break + self.check_hash(checker, filename, tfp) return headers finally: if fp: fp.close() - if tfp: tfp.close() def reporthook(self, url, filename, blocknum, blksize, size): pass # no-op @@ -701,25 +699,21 @@ class PackageIndex(Environment): return local_open(url) try: return open_with_auth(url, self.opener) - except (ValueError, httplib.InvalidURL): - v = sys.exc_info()[1] + except (ValueError, httplib.InvalidURL) as v: msg = ' '.join([str(arg) for arg in v.args]) if warning: self.warn(warning, msg) else: raise DistutilsError('%s %s' % (url, msg)) - except urllib2.HTTPError: - v = sys.exc_info()[1] + except urllib2.HTTPError as v: return v - except urllib2.URLError: - v = sys.exc_info()[1] + except urllib2.URLError as v: if warning: self.warn(warning, v.reason) else: raise DistutilsError("Download error for %s: %s" % (url, v.reason)) - except httplib.BadStatusLine: - v = sys.exc_info()[1] + except httplib.BadStatusLine as v: if warning: self.warn(warning, v.line) else: @@ -728,8 +722,7 @@ class PackageIndex(Environment): 'down, %s' % (url, v.line) ) - except httplib.HTTPException: - v = sys.exc_info()[1] + except httplib.HTTPException as v: if warning: self.warn(warning, v) else: @@ -1040,9 +1033,8 @@ def local_open(url): files = [] for f in os.listdir(filename): if f=='index.html': - fp = open(os.path.join(filename,f),'r') - body = fp.read() - fp.close() + with open(os.path.join(filename,f),'r') as fp: + body = fp.read() break elif os.path.isdir(os.path.join(filename,f)): f+='/' diff --git a/awx/lib/site-packages/setuptools/py31compat.py 
b/awx/lib/site-packages/setuptools/py31compat.py index dbb324b0e3..c487ac0439 100644 --- a/awx/lib/site-packages/setuptools/py31compat.py +++ b/awx/lib/site-packages/setuptools/py31compat.py @@ -1,3 +1,6 @@ +import sys +import unittest + __all__ = ['get_config_vars', 'get_path'] try: @@ -9,3 +12,41 @@ except ImportError: if name not in ('platlib', 'purelib'): raise ValueError("Name must be purelib or platlib") return get_python_lib(name=='platlib') + +try: + # Python >=3.2 + from tempfile import TemporaryDirectory +except ImportError: + import shutil + import tempfile + class TemporaryDirectory(object): + """" + Very simple temporary directory context manager. + Will try to delete afterward, but will also ignore OS and similar + errors on deletion. + """ + def __init__(self): + self.name = None # Handle mkdtemp raising an exception + self.name = tempfile.mkdtemp() + + def __enter__(self): + return self.name + + def __exit__(self, exctype, excvalue, exctrace): + try: + shutil.rmtree(self.name, True) + except OSError: #removal errors are not the only possible + pass + self.name = None + + +unittest_main = unittest.main + +_PY31 = (3, 1) <= sys.version_info[:2] < (3, 2) +if _PY31: + # on Python 3.1, translate testRunner==None to TextTestRunner + # for compatibility with Python 2.6, 2.7, and 3.2+ + def unittest_main(*args, **kwargs): + if 'testRunner' in kwargs and kwargs['testRunner'] is None: + kwargs['testRunner'] = unittest.TextTestRunner + return unittest.main(*args, **kwargs) diff --git a/awx/lib/site-packages/setuptools/sandbox.py b/awx/lib/site-packages/setuptools/sandbox.py index 042c595897..83283ca3dc 100644 --- a/awx/lib/site-packages/setuptools/sandbox.py +++ b/awx/lib/site-packages/setuptools/sandbox.py @@ -5,6 +5,8 @@ import operator import functools import itertools import re +import contextlib +import pickle import pkg_resources @@ -20,58 +22,221 @@ _open = open from distutils.errors import DistutilsError from pkg_resources import working_set -from setuptools.compat import builtins, execfile +from setuptools import compat +from setuptools.compat import builtins __all__ = [ "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup", ] +def _execfile(filename, globals, locals=None): + """ + Python 3 implementation of execfile. + """ + mode = 'rb' + # Python 2.6 compile requires LF for newlines, so use deprecated + # Universal newlines support. + if sys.version_info < (2, 7): + mode += 'U' + with open(filename, mode) as stream: + script = stream.read() + if locals is None: + locals = globals + code = compile(script, filename, 'exec') + exec(code, globals, locals) + + +@contextlib.contextmanager +def save_argv(): + saved = sys.argv[:] + try: + yield saved + finally: + sys.argv[:] = saved + + +@contextlib.contextmanager +def save_path(): + saved = sys.path[:] + try: + yield saved + finally: + sys.path[:] = saved + + +@contextlib.contextmanager +def override_temp(replacement): + """ + Monkey-patch tempfile.tempdir with replacement, ensuring it exists + """ + if not os.path.isdir(replacement): + os.makedirs(replacement) + + saved = tempfile.tempdir + + tempfile.tempdir = replacement + + try: + yield + finally: + tempfile.tempdir = saved + + +@contextlib.contextmanager +def pushd(target): + saved = os.getcwd() + os.chdir(target) + try: + yield saved + finally: + os.chdir(saved) + + +class UnpickleableException(Exception): + """ + An exception representing another Exception that could not be pickled. 
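The sandbox rewrite above decomposes run_setup's monolithic try/finally into small context managers (save_argv, save_path, override_temp, pushd). Their common shape, shown here for a mutable list such as sys.argv, where restoring in place with a slice assignment keeps aliases intact:

    import contextlib
    import sys

    @contextlib.contextmanager
    def save_list(target):
        """Snapshot a list on entry; restore it in place on exit."""
        saved = target[:]
        try:
            yield saved
        finally:
            target[:] = saved         # slice-assign so aliases see the rollback

    with save_list(sys.argv):
        sys.argv.append('--quiet')    # scribble freely inside the context
    assert '--quiet' not in sys.argv  # undone, even if an exception escaped
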
+ """ + @classmethod + def dump(cls, type, exc): + """ + Always return a dumped (pickled) type and exc. If exc can't be pickled, + wrap it in UnpickleableException first. + """ + try: + return pickle.dumps(type), pickle.dumps(exc) + except Exception: + return cls.dump(cls, cls(repr(exc))) + + +class ExceptionSaver: + """ + A Context Manager that will save an exception, serialized, and restore it + later. + """ + def __enter__(self): + return self + + def __exit__(self, type, exc, tb): + if not exc: + return + + # dump the exception + self._saved = UnpickleableException.dump(type, exc) + self._tb = tb + + # suppress the exception + return True + + def resume(self): + "restore and re-raise any exception" + + if '_saved' not in vars(self): + return + + type, exc = map(pickle.loads, self._saved) + compat.reraise(type, exc, self._tb) + + +@contextlib.contextmanager +def save_modules(): + """ + Context in which imported modules are saved. + + Translates exceptions internal to the context into the equivalent exception + outside the context. + """ + saved = sys.modules.copy() + with ExceptionSaver() as saved_exc: + yield saved + + sys.modules.update(saved) + # remove any modules imported since + del_modules = ( + mod_name for mod_name in sys.modules + if mod_name not in saved + # exclude any encodings modules. See #285 + and not mod_name.startswith('encodings.') + ) + _clear_modules(del_modules) + + saved_exc.resume() + + +def _clear_modules(module_names): + for mod_name in list(module_names): + del sys.modules[mod_name] + + +@contextlib.contextmanager +def save_pkg_resources_state(): + saved = pkg_resources.__getstate__() + try: + yield saved + finally: + pkg_resources.__setstate__(saved) + + +@contextlib.contextmanager +def setup_context(setup_dir): + temp_dir = os.path.join(setup_dir, 'temp') + with save_pkg_resources_state(): + with save_modules(): + hide_setuptools() + with save_path(): + with save_argv(): + with override_temp(temp_dir): + with pushd(setup_dir): + # ensure setuptools commands are available + __import__('setuptools') + yield + + +def _needs_hiding(mod_name): + """ + >>> _needs_hiding('setuptools') + True + >>> _needs_hiding('pkg_resources') + True + >>> _needs_hiding('setuptools_plugin') + False + >>> _needs_hiding('setuptools.__init__') + True + >>> _needs_hiding('distutils') + True + """ + pattern = re.compile('(setuptools|pkg_resources|distutils)(\.|$)') + return bool(pattern.match(mod_name)) + + +def hide_setuptools(): + """ + Remove references to setuptools' modules from sys.modules to allow the + invocation to import the most appropriate setuptools. This technique is + necessary to avoid issues such as #315 where setuptools upgrading itself + would fail to find a function declared in the metadata. 
+ """ + modules = filter(_needs_hiding, sys.modules) + _clear_modules(modules) + + def run_setup(setup_script, args): """Run a distutils setup script, sandboxed in its directory""" - old_dir = os.getcwd() - save_argv = sys.argv[:] - save_path = sys.path[:] setup_dir = os.path.abspath(os.path.dirname(setup_script)) - temp_dir = os.path.join(setup_dir,'temp') - if not os.path.isdir(temp_dir): os.makedirs(temp_dir) - save_tmp = tempfile.tempdir - save_modules = sys.modules.copy() - pr_state = pkg_resources.__getstate__() - try: - tempfile.tempdir = temp_dir - os.chdir(setup_dir) + with setup_context(setup_dir): try: sys.argv[:] = [setup_script]+list(args) sys.path.insert(0, setup_dir) # reset to include setup dir, w/clean callback list working_set.__init__() working_set.callbacks.append(lambda dist:dist.activate()) - DirectorySandbox(setup_dir).run( - lambda: execfile( - "setup.py", - {'__file__':setup_script, '__name__':'__main__'} - ) - ) - except SystemExit: - v = sys.exc_info()[1] + def runner(): + ns = dict(__file__=setup_script, __name__='__main__') + _execfile(setup_script, ns) + DirectorySandbox(setup_dir).run(runner) + except SystemExit as v: if v.args and v.args[0]: raise # Normal exit, just return - finally: - pkg_resources.__setstate__(pr_state) - sys.modules.update(save_modules) - # remove any modules imported within the sandbox - del_modules = [ - mod_name for mod_name in sys.modules - if mod_name not in save_modules - # exclude any encodings modules. See #285 - and not mod_name.startswith('encodings.') - ] - list(map(sys.modules.__delitem__, del_modules)) - os.chdir(old_dir) - sys.path[:] = save_path - sys.argv[:] = save_argv - tempfile.tempdir = save_tmp class AbstractSandbox: @@ -268,7 +433,7 @@ class DirectorySandbox(AbstractSandbox): self._violation(operation, src, dst, *args, **kw) return (src,dst) - def open(self, file, flags, mode=0x1FF, *args, **kw): # 0777 + def open(self, file, flags, mode=0o777, *args, **kw): """Called for low-level os.open()""" if flags & WRITE_FLAGS and not self._ok(file): self._violation("os.open", file, flags, mode, *args, **kw) diff --git a/awx/lib/site-packages/setuptools/script (dev).tmpl b/awx/lib/site-packages/setuptools/script (dev).tmpl new file mode 100644 index 0000000000..d58b1bb5bf --- /dev/null +++ b/awx/lib/site-packages/setuptools/script (dev).tmpl @@ -0,0 +1,5 @@ +# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r +__requires__ = %(spec)r +__import__('pkg_resources').require(%(spec)r) +__file__ = %(dev_path)r +exec(compile(open(__file__).read(), __file__, 'exec')) diff --git a/awx/lib/site-packages/setuptools/script.tmpl b/awx/lib/site-packages/setuptools/script.tmpl new file mode 100644 index 0000000000..ff5efbcab3 --- /dev/null +++ b/awx/lib/site-packages/setuptools/script.tmpl @@ -0,0 +1,3 @@ +# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r +__requires__ = %(spec)r +__import__('pkg_resources').run_script(%(spec)r, %(script_name)r) diff --git a/awx/lib/site-packages/setuptools/site-patch.py b/awx/lib/site-packages/setuptools/site-patch.py index a7166f1407..c2168019ad 100644 --- a/awx/lib/site-packages/setuptools/site-patch.py +++ b/awx/lib/site-packages/setuptools/site-patch.py @@ -1,5 +1,6 @@ def __boot(): - import sys, os, os.path + import sys + import os PYTHONPATH = os.environ.get('PYTHONPATH') if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH): PYTHONPATH = [] @@ -49,13 +50,13 @@ def __boot(): addsitedir(item) sys.__egginsert += oldpos # restore effective old position - - d,nd = makepath(stdpath[0]) + + d, 
nd = makepath(stdpath[0]) insert_at = None new_path = [] for item in sys.path: - p,np = makepath(item) + p, np = makepath(item) if np==nd and insert_at is None: # We've hit the first 'system' path entry, so added entries go here @@ -67,17 +68,9 @@ def __boot(): # new path after the insert point, back-insert it new_path.insert(insert_at, item) insert_at += 1 - + sys.path[:] = new_path -if __name__=='site': +if __name__=='site': __boot() del __boot - - - - - - - - diff --git a/awx/lib/site-packages/setuptools/ssl_support.py b/awx/lib/site-packages/setuptools/ssl_support.py index 7b5f429f8f..cc7db067e9 100644 --- a/awx/lib/site-packages/setuptools/ssl_support.py +++ b/awx/lib/site-packages/setuptools/ssl_support.py @@ -178,12 +178,19 @@ class VerifyingHTTPSConn(HTTPSConnection): if hasattr(self, '_tunnel') and getattr(self, '_tunnel_host', None): self.sock = sock self._tunnel() + # http://bugs.python.org/issue7776: Python>=3.4.1 and >=2.7.7 + # change self.host to mean the proxy server host when tunneling is + # being used. Adapt, since we are interested in the destination + # host for the match_hostname() comparison. + actual_host = self._tunnel_host + else: + actual_host = self.host self.sock = ssl.wrap_socket( sock, cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.ca_bundle ) try: - match_hostname(self.sock.getpeercert(), self.host) + match_hostname(self.sock.getpeercert(), actual_host) except CertificateError: self.sock.shutdown(socket.SHUT_RDWR) self.sock.close() diff --git a/awx/lib/site-packages/setuptools/tests/__init__.py b/awx/lib/site-packages/setuptools/tests/__init__.py index b5328ce67a..b8a29cbac2 100644 --- a/awx/lib/site-packages/setuptools/tests/__init__.py +++ b/awx/lib/site-packages/setuptools/tests/__init__.py @@ -1,8 +1,6 @@ """Tests for the 'setuptools' package""" import sys import os -import unittest -from setuptools.tests import doctest import distutils.core import distutils.cmd from distutils.errors import DistutilsOptionError, DistutilsPlatformError @@ -11,24 +9,13 @@ from distutils.core import Extension from distutils.version import LooseVersion from setuptools.compat import func_code -from setuptools.compat import func_code +import pytest + import setuptools.dist import setuptools.depends as dep from setuptools import Feature from setuptools.depends import Require -def additional_tests(): - import doctest, unittest - suite = unittest.TestSuite(( - doctest.DocFileSuite( - os.path.join('tests', 'api_tests.txt'), - optionflags=doctest.ELLIPSIS, package='pkg_resources', - ), - )) - if sys.platform == 'win32': - suite.addTest(doctest.DocFileSuite('win_script_wrapper.txt')) - return suite - def makeSetup(**args): """Return distribution from 'setup(**args)', without executing commands""" @@ -43,7 +30,12 @@ def makeSetup(**args): distutils.core._setup_stop_after = None -class DependsTests(unittest.TestCase): +needs_bytecode = pytest.mark.skipif( + not hasattr(dep, 'get_module_constant'), + reason="bytecode support not available", +) + +class TestDepends: def testExtractConst(self): if not hasattr(dep, 'extract_constant'): @@ -56,86 +48,77 @@ class DependsTests(unittest.TestCase): y = z fc = func_code(f1) + # unrecognized name - self.assertEqual(dep.extract_constant(fc,'q', -1), None) + assert dep.extract_constant(fc,'q', -1) is None # constant assigned - self.assertEqual(dep.extract_constant(fc,'x', -1), "test") + dep.extract_constant(fc,'x', -1) == "test" # expression assigned - self.assertEqual(dep.extract_constant(fc,'y', -1), -1) + dep.extract_constant(fc,'y', -1) == -1 # 
recognized name, not assigned - self.assertEqual(dep.extract_constant(fc,'z', -1), None) + dep.extract_constant(fc,'z', -1) is None def testFindModule(self): - self.assertRaises(ImportError, dep.find_module, 'no-such.-thing') - self.assertRaises(ImportError, dep.find_module, 'setuptools.non-existent') + with pytest.raises(ImportError): + dep.find_module('no-such.-thing') + with pytest.raises(ImportError): + dep.find_module('setuptools.non-existent') f,p,i = dep.find_module('setuptools.tests') f.close() + @needs_bytecode def testModuleExtract(self): - if not hasattr(dep, 'get_module_constant'): - # skip on non-bytecode platforms - return - from email import __version__ - self.assertEqual( - dep.get_module_constant('email','__version__'), __version__ - ) - self.assertEqual( - dep.get_module_constant('sys','version'), sys.version - ) - self.assertEqual( - dep.get_module_constant('setuptools.tests','__doc__'),__doc__ - ) + assert dep.get_module_constant('email','__version__') == __version__ + assert dep.get_module_constant('sys','version') == sys.version + assert dep.get_module_constant('setuptools.tests','__doc__') == __doc__ + @needs_bytecode def testRequire(self): - if not hasattr(dep, 'extract_constant'): - # skip on non-bytecode platformsh - return - req = Require('Email','1.0.3','email') - self.assertEqual(req.name, 'Email') - self.assertEqual(req.module, 'email') - self.assertEqual(req.requested_version, '1.0.3') - self.assertEqual(req.attribute, '__version__') - self.assertEqual(req.full_name(), 'Email-1.0.3') + assert req.name == 'Email' + assert req.module == 'email' + assert req.requested_version == '1.0.3' + assert req.attribute == '__version__' + assert req.full_name() == 'Email-1.0.3' from email import __version__ - self.assertEqual(req.get_version(), __version__) - self.assertTrue(req.version_ok('1.0.9')) - self.assertTrue(not req.version_ok('0.9.1')) - self.assertTrue(not req.version_ok('unknown')) + assert req.get_version() == __version__ + assert req.version_ok('1.0.9') + assert not req.version_ok('0.9.1') + assert not req.version_ok('unknown') - self.assertTrue(req.is_present()) - self.assertTrue(req.is_current()) + assert req.is_present() + assert req.is_current() req = Require('Email 3000','03000','email',format=LooseVersion) - self.assertTrue(req.is_present()) - self.assertTrue(not req.is_current()) - self.assertTrue(not req.version_ok('unknown')) + assert req.is_present() + assert not req.is_current() + assert not req.version_ok('unknown') req = Require('Do-what-I-mean','1.0','d-w-i-m') - self.assertTrue(not req.is_present()) - self.assertTrue(not req.is_current()) + assert not req.is_present() + assert not req.is_current() req = Require('Tests', None, 'tests', homepage="http://example.com") - self.assertEqual(req.format, None) - self.assertEqual(req.attribute, None) - self.assertEqual(req.requested_version, None) - self.assertEqual(req.full_name(), 'Tests') - self.assertEqual(req.homepage, 'http://example.com') + assert req.format is None + assert req.attribute is None + assert req.requested_version is None + assert req.full_name() == 'Tests' + assert req.homepage == 'http://example.com' paths = [os.path.dirname(p) for p in __path__] - self.assertTrue(req.is_present(paths)) - self.assertTrue(req.is_current(paths)) + assert req.is_present(paths) + assert req.is_current(paths) -class DistroTests(unittest.TestCase): +class TestDistro: - def setUp(self): + def setup_method(self, method): self.e1 = Extension('bar.ext',['bar.c']) self.e2 = Extension('c.y', ['y.c']) @@ -147,21 
+130,21 @@ class DistroTests(unittest.TestCase): ) def testDistroType(self): - self.assertTrue(isinstance(self.dist,setuptools.dist.Distribution)) + assert isinstance(self.dist,setuptools.dist.Distribution) def testExcludePackage(self): self.dist.exclude_package('a') - self.assertEqual(self.dist.packages, ['b','c']) + assert self.dist.packages == ['b','c'] self.dist.exclude_package('b') - self.assertEqual(self.dist.packages, ['c']) - self.assertEqual(self.dist.py_modules, ['x']) - self.assertEqual(self.dist.ext_modules, [self.e1, self.e2]) + assert self.dist.packages == ['c'] + assert self.dist.py_modules == ['x'] + assert self.dist.ext_modules == [self.e1, self.e2] self.dist.exclude_package('c') - self.assertEqual(self.dist.packages, []) - self.assertEqual(self.dist.py_modules, ['x']) - self.assertEqual(self.dist.ext_modules, [self.e1]) + assert self.dist.packages == [] + assert self.dist.py_modules == ['x'] + assert self.dist.ext_modules == [self.e1] # test removals from unspecified options makeSetup().exclude_package('x') @@ -169,21 +152,21 @@ class DistroTests(unittest.TestCase): def testIncludeExclude(self): # remove an extension self.dist.exclude(ext_modules=[self.e1]) - self.assertEqual(self.dist.ext_modules, [self.e2]) + assert self.dist.ext_modules == [self.e2] # add it back in self.dist.include(ext_modules=[self.e1]) - self.assertEqual(self.dist.ext_modules, [self.e2, self.e1]) + assert self.dist.ext_modules == [self.e2, self.e1] # should not add duplicate self.dist.include(ext_modules=[self.e1]) - self.assertEqual(self.dist.ext_modules, [self.e2, self.e1]) + assert self.dist.ext_modules == [self.e2, self.e1] def testExcludePackages(self): self.dist.exclude(packages=['c','b','a']) - self.assertEqual(self.dist.packages, []) - self.assertEqual(self.dist.py_modules, ['x']) - self.assertEqual(self.dist.ext_modules, [self.e1]) + assert self.dist.packages == [] + assert self.dist.py_modules == ['x'] + assert self.dist.ext_modules == [self.e1] def testEmpty(self): dist = makeSetup() @@ -192,49 +175,41 @@ class DistroTests(unittest.TestCase): dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2]) def testContents(self): - self.assertTrue(self.dist.has_contents_for('a')) + assert self.dist.has_contents_for('a') self.dist.exclude_package('a') - self.assertTrue(not self.dist.has_contents_for('a')) + assert not self.dist.has_contents_for('a') - self.assertTrue(self.dist.has_contents_for('b')) + assert self.dist.has_contents_for('b') self.dist.exclude_package('b') - self.assertTrue(not self.dist.has_contents_for('b')) + assert not self.dist.has_contents_for('b') - self.assertTrue(self.dist.has_contents_for('c')) + assert self.dist.has_contents_for('c') self.dist.exclude_package('c') - self.assertTrue(not self.dist.has_contents_for('c')) + assert not self.dist.has_contents_for('c') def testInvalidIncludeExclude(self): - self.assertRaises(DistutilsSetupError, - self.dist.include, nonexistent_option='x' - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, nonexistent_option='x' - ) - self.assertRaises(DistutilsSetupError, - self.dist.include, packages={'x':'y'} - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, packages={'x':'y'} - ) - self.assertRaises(DistutilsSetupError, - self.dist.include, ext_modules={'x':'y'} - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, ext_modules={'x':'y'} - ) + with pytest.raises(DistutilsSetupError): + self.dist.include(nonexistent_option='x') + with pytest.raises(DistutilsSetupError): + 
self.dist.exclude(nonexistent_option='x') + with pytest.raises(DistutilsSetupError): + self.dist.include(packages={'x':'y'}) + with pytest.raises(DistutilsSetupError): + self.dist.exclude(packages={'x':'y'}) + with pytest.raises(DistutilsSetupError): + self.dist.include(ext_modules={'x':'y'}) + with pytest.raises(DistutilsSetupError): + self.dist.exclude(ext_modules={'x':'y'}) - self.assertRaises(DistutilsSetupError, - self.dist.include, package_dir=['q'] - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, package_dir=['q'] - ) + with pytest.raises(DistutilsSetupError): + self.dist.include(package_dir=['q']) + with pytest.raises(DistutilsSetupError): + self.dist.exclude(package_dir=['q']) -class FeatureTests(unittest.TestCase): +class TestFeatures: - def setUp(self): + def setup_method(self, method): self.req = Require('Distutils','1.0.3','distutils') self.dist = makeSetup( features={ @@ -256,80 +231,75 @@ class FeatureTests(unittest.TestCase): ) def testDefaults(self): - self.assertTrue(not - Feature( - "test",standard=True,remove='x',available=False - ).include_by_default() - ) - self.assertTrue( - Feature("test",standard=True,remove='x').include_by_default() - ) + assert not Feature( + "test",standard=True,remove='x',available=False + ).include_by_default() + assert Feature("test",standard=True,remove='x').include_by_default() # Feature must have either kwargs, removes, or require_features - self.assertRaises(DistutilsSetupError, Feature, "test") + with pytest.raises(DistutilsSetupError): + Feature("test") def testAvailability(self): - self.assertRaises( - DistutilsPlatformError, - self.dist.features['dwim'].include_in, self.dist - ) + with pytest.raises(DistutilsPlatformError): + self.dist.features['dwim'].include_in(self.dist) def testFeatureOptions(self): dist = self.dist - self.assertTrue( + assert ( ('with-dwim',None,'include DWIM') in dist.feature_options ) - self.assertTrue( + assert ( ('without-dwim',None,'exclude DWIM (default)') in dist.feature_options ) - self.assertTrue( + assert ( ('with-bar',None,'include bar (default)') in dist.feature_options ) - self.assertTrue( + assert ( ('without-bar',None,'exclude bar') in dist.feature_options ) - self.assertEqual(dist.feature_negopt['without-foo'],'with-foo') - self.assertEqual(dist.feature_negopt['without-bar'],'with-bar') - self.assertEqual(dist.feature_negopt['without-dwim'],'with-dwim') - self.assertTrue(not 'without-baz' in dist.feature_negopt) + assert dist.feature_negopt['without-foo'] == 'with-foo' + assert dist.feature_negopt['without-bar'] == 'with-bar' + assert dist.feature_negopt['without-dwim'] == 'with-dwim' + assert (not 'without-baz' in dist.feature_negopt) def testUseFeatures(self): dist = self.dist - self.assertEqual(dist.with_foo,1) - self.assertEqual(dist.with_bar,0) - self.assertEqual(dist.with_baz,1) - self.assertTrue(not 'bar_et' in dist.py_modules) - self.assertTrue(not 'pkg.bar' in dist.packages) - self.assertTrue('pkg.baz' in dist.packages) - self.assertTrue('scripts/baz_it' in dist.scripts) - self.assertTrue(('libfoo','foo/foofoo.c') in dist.libraries) - self.assertEqual(dist.ext_modules,[]) - self.assertEqual(dist.require_features, [self.req]) + assert dist.with_foo == 1 + assert dist.with_bar == 0 + assert dist.with_baz == 1 + assert (not 'bar_et' in dist.py_modules) + assert (not 'pkg.bar' in dist.packages) + assert ('pkg.baz' in dist.packages) + assert ('scripts/baz_it' in dist.scripts) + assert (('libfoo','foo/foofoo.c') in dist.libraries) + assert dist.ext_modules == [] + assert 
dist.require_features == [self.req] # If we ask for bar, it should fail because we explicitly disabled # it on the command line - self.assertRaises(DistutilsOptionError, dist.include_feature, 'bar') + with pytest.raises(DistutilsOptionError): + dist.include_feature('bar') def testFeatureWithInvalidRemove(self): - self.assertRaises( - SystemExit, makeSetup, features = {'x':Feature('x', remove='y')} - ) + with pytest.raises(SystemExit): + makeSetup(features={'x':Feature('x', remove='y')}) -class TestCommandTests(unittest.TestCase): +class TestCommandTests: def testTestIsCommand(self): test_cmd = makeSetup().get_command_obj('test') - self.assertTrue(isinstance(test_cmd, distutils.cmd.Command)) + assert (isinstance(test_cmd, distutils.cmd.Command)) def testLongOptSuiteWNoDefault(self): ts1 = makeSetup(script_args=['test','--test-suite=foo.tests.suite']) ts1 = ts1.get_command_obj('test') ts1.ensure_finalized() - self.assertEqual(ts1.test_suite, 'foo.tests.suite') + assert ts1.test_suite == 'foo.tests.suite' def testDefaultSuite(self): ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test') ts2.ensure_finalized() - self.assertEqual(ts2.test_suite, 'bar.tests.suite') + assert ts2.test_suite == 'bar.tests.suite' def testDefaultWModuleOnCmdLine(self): ts3 = makeSetup( @@ -337,16 +307,17 @@ class TestCommandTests(unittest.TestCase): script_args=['test','-m','foo.tests'] ).get_command_obj('test') ts3.ensure_finalized() - self.assertEqual(ts3.test_module, 'foo.tests') - self.assertEqual(ts3.test_suite, 'foo.tests.test_suite') + assert ts3.test_module == 'foo.tests' + assert ts3.test_suite == 'foo.tests.test_suite' def testConflictingOptions(self): ts4 = makeSetup( script_args=['test','-m','bar.tests', '-s','foo.tests.suite'] ).get_command_obj('test') - self.assertRaises(DistutilsOptionError, ts4.ensure_finalized) + with pytest.raises(DistutilsOptionError): + ts4.ensure_finalized() def testNoSuite(self): ts5 = makeSetup().get_command_obj('test') ts5.ensure_finalized() - self.assertEqual(ts5.test_suite, None) + assert ts5.test_suite == None diff --git a/awx/lib/site-packages/setuptools/tests/contexts.py b/awx/lib/site-packages/setuptools/tests/contexts.py new file mode 100644 index 0000000000..4a46176557 --- /dev/null +++ b/awx/lib/site-packages/setuptools/tests/contexts.py @@ -0,0 +1,93 @@ +import tempfile +import os +import shutil +import sys +import contextlib +import site + +from ..compat import StringIO + + +@contextlib.contextmanager +def tempdir(cd=lambda dir:None, **kwargs): + temp_dir = tempfile.mkdtemp(**kwargs) + orig_dir = os.getcwd() + try: + cd(temp_dir) + yield temp_dir + finally: + cd(orig_dir) + shutil.rmtree(temp_dir) + + +@contextlib.contextmanager +def environment(**replacements): + """ + In a context, patch the environment with replacements. Pass None values + to clear the values. 
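The test hunks above are part of a wholesale unittest-to-pytest migration: TestCase subclasses become plain classes, setUp becomes setup_method, and assertRaises/assertEqual become pytest.raises and bare asserts. The before/after on a toy function, for reference:

    import pytest

    def divide(a, b):
        return a / b

    class TestDivide:
        def test_by_zero(self):
            # was: self.assertRaises(ZeroDivisionError, divide, 1, 0)
            with pytest.raises(ZeroDivisionError):
                divide(1, 0)

        def test_result(self):
            # was: self.assertEqual(divide(6, 3), 2)
            assert divide(6, 3) == 2
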
+ """ + saved = dict( + (key, os.environ[key]) + for key in replacements + if key in os.environ + ) + + # remove values that are null + remove = (key for (key, value) in replacements.items() if value is None) + for key in list(remove): + os.environ.pop(key, None) + replacements.pop(key) + + os.environ.update(replacements) + + try: + yield saved + finally: + for key in replacements: + os.environ.pop(key, None) + os.environ.update(saved) + + +@contextlib.contextmanager +def argv(repl): + old_argv = sys.argv[:] + sys.argv[:] = repl + yield + sys.argv[:] = old_argv + + +@contextlib.contextmanager +def quiet(): + """ + Redirect stdout/stderr to StringIO objects to prevent console output from + distutils commands. + """ + + old_stdout = sys.stdout + old_stderr = sys.stderr + new_stdout = sys.stdout = StringIO() + new_stderr = sys.stderr = StringIO() + try: + yield new_stdout, new_stderr + finally: + new_stdout.seek(0) + new_stderr.seek(0) + sys.stdout = old_stdout + sys.stderr = old_stderr + + +@contextlib.contextmanager +def save_user_site_setting(): + saved = site.ENABLE_USER_SITE + try: + yield saved + finally: + site.ENABLE_USER_SITE = saved + + +@contextlib.contextmanager +def suppress_exceptions(*excs): + try: + yield + except excs: + pass diff --git a/awx/lib/site-packages/setuptools/tests/environment.py b/awx/lib/site-packages/setuptools/tests/environment.py index 476d280ae2..a23c0504e7 100644 --- a/awx/lib/site-packages/setuptools/tests/environment.py +++ b/awx/lib/site-packages/setuptools/tests/environment.py @@ -1,119 +1,10 @@ import os -import zipfile import sys -import tempfile -import unittest -import shutil -import stat import unicodedata from subprocess import Popen as _Popen, PIPE as _PIPE -def _extract(self, member, path=None, pwd=None): - """for zipfile py2.5 borrowed from cpython""" - if not isinstance(member, zipfile.ZipInfo): - member = self.getinfo(member) - - if path is None: - path = os.getcwd() - - return _extract_member(self, member, path, pwd) - - -def _extract_from_zip(self, name, dest_path): - dest_file = open(dest_path, 'wb') - try: - dest_file.write(self.read(name)) - finally: - dest_file.close() - - -def _extract_member(self, member, targetpath, pwd): - """for zipfile py2.5 borrowed from cpython""" - # build the destination pathname, replacing - # forward slashes to platform specific separators. - # Strip trailing path separator, unless it represents the root. - if (targetpath[-1:] in (os.path.sep, os.path.altsep) - and len(os.path.splitdrive(targetpath)[1]) > 1): - targetpath = targetpath[:-1] - - # don't include leading "/" from file name if present - if member.filename[0] == '/': - targetpath = os.path.join(targetpath, member.filename[1:]) - else: - targetpath = os.path.join(targetpath, member.filename) - - targetpath = os.path.normpath(targetpath) - - # Create all upper directories if necessary. 
- upperdirs = os.path.dirname(targetpath) - if upperdirs and not os.path.exists(upperdirs): - os.makedirs(upperdirs) - - if member.filename[-1] == '/': - if not os.path.isdir(targetpath): - os.mkdir(targetpath) - return targetpath - - _extract_from_zip(self, member.filename, targetpath) - - return targetpath - - -def _remove_dir(target): - - #on windows this seems to a problem - for dir_path, dirs, files in os.walk(target): - os.chmod(dir_path, stat.S_IWRITE) - for filename in files: - os.chmod(os.path.join(dir_path, filename), stat.S_IWRITE) - shutil.rmtree(target) - - -class ZippedEnvironment(unittest.TestCase): - - datafile = None - dataname = None - old_cwd = None - - def setUp(self): - if self.datafile is None or self.dataname is None: - return - - if not os.path.isfile(self.datafile): - self.old_cwd = None - return - - self.old_cwd = os.getcwd() - - self.temp_dir = tempfile.mkdtemp() - zip_file, source, target = [None, None, None] - try: - zip_file = zipfile.ZipFile(self.datafile) - for files in zip_file.namelist(): - _extract(zip_file, files, self.temp_dir) - finally: - if zip_file: - zip_file.close() - del zip_file - - os.chdir(os.path.join(self.temp_dir, self.dataname)) - - def tearDown(self): - #Assume setUp was never completed - if self.dataname is None or self.datafile is None: - return - - try: - if self.old_cwd: - os.chdir(self.old_cwd) - _remove_dir(self.temp_dir) - except OSError: - #sigh? - pass - - def _which_dirs(cmd): result = set() for path in os.environ.get('PATH', '').split(os.pathsep): @@ -147,10 +38,13 @@ def run_setup_py(cmd, pypath=None, path=None, cmd = [sys.executable, "setup.py"] + list(cmd) - #regarding the shell argument, see: http://bugs.python.org/issue8557 + # http://bugs.python.org/issue8557 + shell = sys.platform == 'win32' + try: - proc = _Popen(cmd, stdout=_PIPE, stderr=_PIPE, - shell=(sys.platform == 'win32'), env=env) + proc = _Popen( + cmd, stdout=_PIPE, stderr=_PIPE, shell=shell, env=env, + ) data = proc.communicate()[data_stream] except OSError: @@ -158,7 +52,8 @@ def run_setup_py(cmd, pypath=None, path=None, #decode the console string if needed if hasattr(data, "decode"): - data = data.decode() # should use the preffered encoding + # use the default encoding + data = data.decode() data = unicodedata.normalize('NFC', data) #communciate calls wait() diff --git a/awx/lib/site-packages/setuptools/tests/fixtures.py b/awx/lib/site-packages/setuptools/tests/fixtures.py new file mode 100644 index 0000000000..c70c38cb71 --- /dev/null +++ b/awx/lib/site-packages/setuptools/tests/fixtures.py @@ -0,0 +1,27 @@ +try: + from unittest import mock +except ImportError: + import mock +import pytest + +from . import contexts + + +@pytest.yield_fixture +def user_override(): + """ + Override site.USER_BASE and site.USER_SITE with temporary directories in + a context. 
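The user_override fixture above isolates site.USER_BASE and site.USER_SITE per test by stacking mock.patch over temporary directories. Roughly the same fixture in modern pytest spelling (tmp_path and a plain yield fixture; the names here are illustrative):

    import site
    try:
        from unittest import mock
    except ImportError:
        import mock            # Python 2 backport, as in the diff

    import pytest

    @pytest.fixture
    def isolated_user_site(tmp_path):
        """Point the user site dirs at throwaway directories for one test."""
        base, site_dir = tmp_path / 'base', tmp_path / 'site'
        base.mkdir()
        site_dir.mkdir()
        with mock.patch('site.USER_BASE', str(base)):
            with mock.patch('site.USER_SITE', str(site_dir)):
                yield str(site_dir)

    def test_user_site_is_isolated(isolated_user_site):
        assert site.USER_SITE == isolated_user_site
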
+ """ + with contexts.tempdir() as user_base: + with mock.patch('site.USER_BASE', user_base): + with contexts.tempdir() as user_site: + with mock.patch('site.USER_SITE', user_site): + with contexts.save_user_site_setting(): + yield + + +@pytest.yield_fixture +def tmpdir_cwd(tmpdir): + with tmpdir.as_cwd() as orig: + yield orig diff --git a/awx/lib/site-packages/setuptools/tests/indexes/test_links_priority/external.html b/awx/lib/site-packages/setuptools/tests/indexes/test_links_priority/external.html new file mode 100644 index 0000000000..92e4702f63 --- /dev/null +++ b/awx/lib/site-packages/setuptools/tests/indexes/test_links_priority/external.html @@ -0,0 +1,3 @@ +<html><body> +<a href="/foobar-0.1.tar.gz#md5=1__bad_md5___">bad old link</a> +</body></html> diff --git a/awx/lib/site-packages/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html b/awx/lib/site-packages/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html new file mode 100644 index 0000000000..fefb028bd3 --- /dev/null +++ b/awx/lib/site-packages/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html @@ -0,0 +1,4 @@ +<html><body> +<a href="/foobar-0.1.tar.gz#md5=0_correct_md5">foobar-0.1.tar.gz</a><br/> +<a href="../../external.html" rel="homepage">external homepage</a><br/> +</body></html> diff --git a/awx/lib/site-packages/setuptools/tests/py26compat.py b/awx/lib/site-packages/setuptools/tests/py26compat.py index d4fb891af6..c56808816e 100644 --- a/awx/lib/site-packages/setuptools/tests/py26compat.py +++ b/awx/lib/site-packages/setuptools/tests/py26compat.py @@ -1,14 +1,14 @@ -import unittest +import sys +import tarfile +import contextlib -try: - # provide skipIf for Python 2.4-2.6 - skipIf = unittest.skipIf -except AttributeError: - def skipIf(condition, reason): - def skipper(func): - def skip(*args, **kwargs): - return - if condition: - return skip - return func - return skipper +def _tarfile_open_ex(*args, **kwargs): + """ + Extend result as a context manager. + """ + return contextlib.closing(tarfile.open(*args, **kwargs)) + +if sys.version_info[:2] < (2, 7) or (3, 0) <= sys.version_info[:2] < (3, 2): + tarfile_open = _tarfile_open_ex +else: + tarfile_open = tarfile.open diff --git a/awx/lib/site-packages/setuptools/tests/server.py b/awx/lib/site-packages/setuptools/tests/server.py index ae2381e355..6b21427992 100644 --- a/awx/lib/site-packages/setuptools/tests/server.py +++ b/awx/lib/site-packages/setuptools/tests/server.py @@ -1,11 +1,10 @@ """Basic http server for tests to simulate PyPI or custom indexes """ -import sys + import time import threading from setuptools.compat import BaseHTTPRequestHandler -from setuptools.compat import (urllib2, URLError, HTTPServer, - SimpleHTTPRequestHandler) +from setuptools.compat import HTTPServer, SimpleHTTPRequestHandler class IndexServer(HTTPServer): """Basic single-threaded http server simulating a package index @@ -23,12 +22,8 @@ class IndexServer(HTTPServer): HTTPServer.__init__(self, server_address, RequestHandlerClass) self._run = True - def serve(self): - while self._run: - self.handle_request() - def start(self): - self.thread = threading.Thread(target=self.serve) + self.thread = threading.Thread(target=self.serve_forever) self.thread.start() def stop(self): @@ -37,19 +32,7 @@ class IndexServer(HTTPServer): # Let the server finish the last request and wait for a new one. time.sleep(0.1) - # self.shutdown is not supported on python < 2.6, so just - # set _run to false, and make a request, causing it to - # terminate. 
- self._run = False - url = 'http://127.0.0.1:%(server_port)s/' % vars(self) - try: - if sys.version_info >= (2, 6): - urllib2.urlopen(url, timeout=5) - else: - urllib2.urlopen(url) - except URLError: - # ignore any errors; all that's important is the request - pass + self.shutdown() self.thread.join() self.socket.close() @@ -77,6 +60,6 @@ class MockServer(HTTPServer, threading.Thread): def run(self): self.serve_forever() + @property def url(self): return 'http://localhost:%(server_port)s/' % vars(self) - url = property(url) diff --git a/awx/lib/site-packages/setuptools/tests/test_bdist_egg.py b/awx/lib/site-packages/setuptools/tests/test_bdist_egg.py index 1a12218645..ccfb2ea76b 100644 --- a/awx/lib/site-packages/setuptools/tests/test_bdist_egg.py +++ b/awx/lib/site-packages/setuptools/tests/test_bdist_egg.py @@ -1,50 +1,32 @@ """develop tests """ -import sys -import os, re, shutil, tempfile, unittest -import tempfile -import site +import os +import re + +import pytest -from distutils.errors import DistutilsError -from setuptools.compat import StringIO -from setuptools.command.bdist_egg import bdist_egg -from setuptools.command import easy_install as easy_install_pkg from setuptools.dist import Distribution +from . import contexts + SETUP_PY = """\ from setuptools import setup setup(name='foo', py_modules=['hi']) """ -class TestDevelopTest(unittest.TestCase): - - def setUp(self): - self.dir = tempfile.mkdtemp() - self.old_cwd = os.getcwd() - os.chdir(self.dir) - f = open('setup.py', 'w') +@pytest.yield_fixture +def setup_context(tmpdir): + with (tmpdir/'setup.py').open('w') as f: f.write(SETUP_PY) - f.close() - f = open('hi.py', 'w') + with (tmpdir/'hi.py').open('w') as f: f.write('1\n') - f.close() - if sys.version >= "2.6": - self.old_base = site.USER_BASE - site.USER_BASE = tempfile.mkdtemp() - self.old_site = site.USER_SITE - site.USER_SITE = tempfile.mkdtemp() + with tmpdir.as_cwd(): + yield tmpdir - def tearDown(self): - os.chdir(self.old_cwd) - shutil.rmtree(self.dir) - if sys.version >= "2.6": - shutil.rmtree(site.USER_BASE) - shutil.rmtree(site.USER_SITE) - site.USER_BASE = self.old_base - site.USER_SITE = self.old_site - def test_bdist_egg(self): +class Test: + def test_bdist_egg(self, setup_context, user_override): dist = Distribution(dict( script_name='setup.py', script_args=['bdist_egg'], @@ -52,18 +34,10 @@ class TestDevelopTest(unittest.TestCase): py_modules=['hi'] )) os.makedirs(os.path.join('build', 'src')) - old_stdout = sys.stdout - sys.stdout = o = StringIO() - try: + with contexts.quiet(): dist.parse_command_line() dist.run_commands() - finally: - sys.stdout = old_stdout # let's see if we got our egg link at the right place [content] = os.listdir('dist') - self.assertTrue(re.match('foo-0.0.0-py[23].\d.egg$', content)) - -def test_suite(): - return unittest.makeSuite(TestDevelopTest) - + assert re.match('foo-0.0.0-py[23].\d.egg$', content) diff --git a/awx/lib/site-packages/setuptools/tests/test_build_ext.py b/awx/lib/site-packages/setuptools/tests/test_build_ext.py index a520ced9d6..0719ba44ae 100644 --- a/awx/lib/site-packages/setuptools/tests/test_build_ext.py +++ b/awx/lib/site-packages/setuptools/tests/test_build_ext.py @@ -1,20 +1,18 @@ -"""build_ext tests -""" -import os, shutil, tempfile, unittest -from distutils.command.build_ext import build_ext as distutils_build_ext +import distutils.command.build_ext as orig + from setuptools.command.build_ext import build_ext from setuptools.dist import Distribution -class TestBuildExtTest(unittest.TestCase): - +class 
TestBuildExt: def test_get_ext_filename(self): - # setuptools needs to give back the same - # result than distutils, even if the fullname - # is not in ext_map + """ + Setuptools needs to give back the same + result as distutils, even if the fullname + is not in ext_map. + """ dist = Distribution() cmd = build_ext(dist) cmd.ext_map['foo/bar'] = '' res = cmd.get_ext_filename('foo') - wanted = distutils_build_ext.get_ext_filename(cmd, 'foo') + wanted = orig.build_ext.get_ext_filename(cmd, 'foo') assert res == wanted - diff --git a/awx/lib/site-packages/setuptools/tests/test_develop.py b/awx/lib/site-packages/setuptools/tests/test_develop.py index 7b90161a8a..ed1b194a78 100644 --- a/awx/lib/site-packages/setuptools/tests/test_develop.py +++ b/awx/lib/site-packages/setuptools/tests/test_develop.py @@ -1,14 +1,12 @@ """develop tests """ -import sys -import os, shutil, tempfile, unittest -import tempfile +import os +import shutil import site +import sys +import tempfile -from distutils.errors import DistutilsError from setuptools.command.develop import develop -from setuptools.command import easy_install as easy_install_pkg -from setuptools.compat import StringIO from setuptools.dist import Distribution SETUP_PY = """\ @@ -23,10 +21,10 @@ setup(name='foo', INIT_PY = """print "foo" """ -class TestDevelopTest(unittest.TestCase): +class TestDevelopTest: - def setUp(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix'): + def setup_method(self, method): + if hasattr(sys, 'real_prefix'): return # Directory structure @@ -50,8 +48,8 @@ class TestDevelopTest(unittest.TestCase): self.old_site = site.USER_SITE site.USER_SITE = tempfile.mkdtemp() - def tearDown(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix): + def teardown_method(self, method): + if hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix): return os.chdir(self.old_cwd) @@ -62,7 +60,7 @@ class TestDevelopTest(unittest.TestCase): site.USER_SITE = self.old_site def test_develop(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix'): + if hasattr(sys, 'real_prefix'): return dist = Distribution( dict(name='foo', @@ -86,7 +84,7 @@ class TestDevelopTest(unittest.TestCase): # let's see if we got our egg link at the right place content = os.listdir(site.USER_SITE) content.sort() - self.assertEqual(content, ['easy-install.pth', 'foo.egg-link']) + assert content == ['easy-install.pth', 'foo.egg-link'] # Check that we are using the right code. 
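test_bdist_egg above trades setUp/tearDown bookkeeping for a yield fixture built on pytest's tmpdir: write the project files, chdir in with tmpdir.as_cwd(), and let the fixture unwind everything. A minimal self-contained version of that shape:

    import os
    import pytest

    SETUP_PY = "from setuptools import setup\nsetup(name='foo')\n"

    @pytest.fixture
    def project_dir(tmpdir):
        """Create a throwaway project and run the test from inside it."""
        tmpdir.join('setup.py').write(SETUP_PY)
        tmpdir.join('hi.py').write('1\n')
        with tmpdir.as_cwd():
            yield tmpdir

    def test_files_present(project_dir):
        assert {'setup.py', 'hi.py'} <= set(os.listdir('.'))
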
egg_link_file = open(os.path.join(site.USER_SITE, 'foo.egg-link'), 'rt') @@ -100,23 +98,6 @@ class TestDevelopTest(unittest.TestCase): finally: init_file.close() if sys.version < "3": - self.assertEqual(init, 'print "foo"') + assert init == 'print "foo"' else: - self.assertEqual(init, 'print("foo")') - - def notest_develop_with_setup_requires(self): - - wanted = ("Could not find suitable distribution for " - "Requirement.parse('I-DONT-EXIST')") - old_dir = os.getcwd() - os.chdir(self.dir) - try: - try: - dist = Distribution({'setup_requires': ['I_DONT_EXIST']}) - except DistutilsError: - e = sys.exc_info()[1] - error = str(e) - if error == wanted: - pass - finally: - os.chdir(old_dir) + assert init == 'print("foo")' diff --git a/awx/lib/site-packages/setuptools/tests/test_dist_info.py b/awx/lib/site-packages/setuptools/tests/test_dist_info.py index a8adb68c2d..6d0ab58792 100644 --- a/awx/lib/site-packages/setuptools/tests/test_dist_info.py +++ b/awx/lib/site-packages/setuptools/tests/test_dist_info.py @@ -3,28 +3,20 @@ import os import shutil import tempfile -import unittest -import textwrap -try: - import ast -except: - pass +import pytest import pkg_resources +from .textwrap import DALS -from setuptools.tests.py26compat import skipIf -def DALS(s): - "dedent and left-strip" - return textwrap.dedent(s).lstrip() - -class TestDistInfo(unittest.TestCase): +class TestDistInfo: def test_distinfo(self): - dists = {} - for d in pkg_resources.find_distributions(self.tmpdir): - dists[d.project_name] = d + dists = dict( + (d.project_name, d) + for d in pkg_resources.find_distributions(self.tmpdir) + ) assert len(dists) == 2, dists @@ -34,50 +26,45 @@ class TestDistInfo(unittest.TestCase): assert versioned.version == '2.718' # from filename assert unversioned.version == '0.3' # from METADATA - @skipIf('ast' not in globals(), - "ast is used to test conditional dependencies (Python >= 2.6)") + @pytest.mark.importorskip('ast') def test_conditional_dependencies(self): - requires = [pkg_resources.Requirement.parse('splort==4'), - pkg_resources.Requirement.parse('quux>=1.1')] + specs = 'splort==4', 'quux>=1.1' + requires = list(map(pkg_resources.Requirement.parse, specs)) for d in pkg_resources.find_distributions(self.tmpdir): - self.assertEqual(d.requires(), requires[:1]) - self.assertEqual(d.requires(extras=('baz',)), requires) - self.assertEqual(d.extras, ['baz']) + assert d.requires() == requires[:1] + assert d.requires(extras=('baz',)) == requires + assert d.extras == ['baz'] - def setUp(self): + metadata_template = DALS(""" + Metadata-Version: 1.2 + Name: {name} + {version} + Requires-Dist: splort (==4) + Provides-Extra: baz + Requires-Dist: quux (>=1.1); extra == 'baz' + """) + + def setup_method(self, method): self.tmpdir = tempfile.mkdtemp() - versioned = os.path.join(self.tmpdir, - 'VersionedDistribution-2.718.dist-info') + dist_info_name = 'VersionedDistribution-2.718.dist-info' + versioned = os.path.join(self.tmpdir, dist_info_name) os.mkdir(versioned) - metadata_file = open(os.path.join(versioned, 'METADATA'), 'w+') - try: - metadata_file.write(DALS( - """ - Metadata-Version: 1.2 - Name: VersionedDistribution - Requires-Dist: splort (4) - Provides-Extra: baz - Requires-Dist: quux (>=1.1); extra == 'baz' - """)) - finally: - metadata_file.close() - unversioned = os.path.join(self.tmpdir, - 'UnversionedDistribution.dist-info') + with open(os.path.join(versioned, 'METADATA'), 'w+') as metadata_file: + metadata = self.metadata_template.format( + name='VersionedDistribution', + version='', + 
).replace('\n\n', '\n') + metadata_file.write(metadata) + dist_info_name = 'UnversionedDistribution.dist-info' + unversioned = os.path.join(self.tmpdir, dist_info_name) os.mkdir(unversioned) - metadata_file = open(os.path.join(unversioned, 'METADATA'), 'w+') - try: - metadata_file.write(DALS( - """ - Metadata-Version: 1.2 - Name: UnversionedDistribution - Version: 0.3 - Requires-Dist: splort (==4) - Provides-Extra: baz - Requires-Dist: quux (>=1.1); extra == 'baz' - """)) - finally: - metadata_file.close() + with open(os.path.join(unversioned, 'METADATA'), 'w+') as metadata_file: + metadata = self.metadata_template.format( + name='UnversionedDistribution', + version='Version: 0.3', + ) + metadata_file.write(metadata) - def tearDown(self): + def teardown_method(self, method): shutil.rmtree(self.tmpdir) diff --git a/awx/lib/site-packages/setuptools/tests/test_easy_install.py b/awx/lib/site-packages/setuptools/tests/test_easy_install.py index d2cc7a0fe6..4331d30e68 100644 --- a/awx/lib/site-packages/setuptools/tests/test_easy_install.py +++ b/awx/lib/site-packages/setuptools/tests/test_easy_install.py @@ -1,29 +1,44 @@ +# -*- coding: utf-8 -*- + """Easy install Tests """ +from __future__ import absolute_import + import sys import os import shutil import tempfile -import unittest import site import contextlib -import textwrap import tarfile import logging -import distutils.core +import itertools +import distutils.errors -from setuptools.compat import StringIO, BytesIO, next, urlparse -from setuptools.sandbox import run_setup, SandboxViolation -from setuptools.command.easy_install import ( - easy_install, fix_jython_executable, get_script_args, nt_quote_arg) +import pytest +try: + from unittest import mock +except ImportError: + import mock + +from setuptools import sandbox +from setuptools import compat +from setuptools.compat import StringIO, BytesIO, urlparse +from setuptools.sandbox import run_setup +import setuptools.command.easy_install as ei from setuptools.command.easy_install import PthDistributions from setuptools.command import easy_install as easy_install_pkg from setuptools.dist import Distribution -from pkg_resources import working_set, VersionConflict +from pkg_resources import working_set from pkg_resources import Distribution as PRDistribution import setuptools.tests.server import pkg_resources +from .py26compat import tarfile_open +from . 
import contexts +from .textwrap import DALS + + class FakeDist(object): def get_entry_map(self, group): if group != 'console_scripts': @@ -33,134 +48,123 @@ class FakeDist(object): def as_requirement(self): return 'spec' -WANTED = """\ -#!%s -# EASY-INSTALL-ENTRY-SCRIPT: 'spec','console_scripts','name' -__requires__ = 'spec' -import sys -from pkg_resources import load_entry_point +SETUP_PY = DALS(""" + from setuptools import setup -if __name__ == '__main__': - sys.exit( - load_entry_point('spec', 'console_scripts', 'name')() - ) -""" % nt_quote_arg(fix_jython_executable(sys.executable, "")) + setup(name='foo') + """) -SETUP_PY = """\ -from setuptools import setup - -setup(name='foo') -""" - -class TestEasyInstallTest(unittest.TestCase): +class TestEasyInstallTest: def test_install_site_py(self): dist = Distribution() - cmd = easy_install(dist) + cmd = ei.easy_install(dist) cmd.sitepy_installed = False cmd.install_dir = tempfile.mkdtemp() try: cmd.install_site_py() sitepy = os.path.join(cmd.install_dir, 'site.py') - self.assertTrue(os.path.exists(sitepy)) + assert os.path.exists(sitepy) finally: shutil.rmtree(cmd.install_dir) def test_get_script_args(self): + header = ei.CommandSpec.best().from_environment().as_header() + expected = header + DALS(""" + # EASY-INSTALL-ENTRY-SCRIPT: 'spec','console_scripts','name' + __requires__ = 'spec' + import sys + from pkg_resources import load_entry_point + + if __name__ == '__main__': + sys.exit( + load_entry_point('spec', 'console_scripts', 'name')() + ) + """) dist = FakeDist() - old_platform = sys.platform - try: - name, script = [i for i in next(get_script_args(dist))][0:2] - finally: - sys.platform = old_platform + args = next(ei.ScriptWriter.get_args(dist)) + name, script = itertools.islice(args, 2) - self.assertEqual(script, WANTED) + assert script == expected def test_no_find_links(self): # new option '--no-find-links', that blocks find-links added at # the project level dist = Distribution() - cmd = easy_install(dist) + cmd = ei.easy_install(dist) cmd.check_pth_processing = lambda: True cmd.no_find_links = True cmd.find_links = ['link1', 'link2'] cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok') cmd.args = ['ok'] cmd.ensure_finalized() - self.assertEqual(cmd.package_index.scanned_urls, {}) + assert cmd.package_index.scanned_urls == {} # let's try without it (default behavior) - cmd = easy_install(dist) + cmd = ei.easy_install(dist) cmd.check_pth_processing = lambda: True cmd.find_links = ['link1', 'link2'] cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok') cmd.args = ['ok'] cmd.ensure_finalized() keys = sorted(cmd.package_index.scanned_urls.keys()) - self.assertEqual(keys, ['link1', 'link2']) + assert keys == ['link1', 'link2'] + + def test_write_exception(self): + """ + Test that `cant_write_to_target` is rendered as a DistutilsError. 
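+        (easy_install calls cant_write_to_target when the install
+        directory is not writable; the DistutilsError carries the
+        user-facing explanation.)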
+ """ + dist = Distribution() + cmd = ei.easy_install(dist) + cmd.install_dir = os.getcwd() + with pytest.raises(distutils.errors.DistutilsError): + cmd.cant_write_to_target() -class TestPTHFileWriter(unittest.TestCase): +class TestPTHFileWriter: def test_add_from_cwd_site_sets_dirty(self): '''a pth file manager should set dirty if a distribution is in site but also the cwd ''' pth = PthDistributions('does-not_exist', [os.getcwd()]) - self.assertTrue(not pth.dirty) + assert not pth.dirty pth.add(PRDistribution(os.getcwd())) - self.assertTrue(pth.dirty) + assert pth.dirty def test_add_from_site_is_ignored(self): - if os.name != 'nt': - location = '/test/location/does-not-have-to-exist' - else: - location = 'c:\\does_not_exist' + location = '/test/location/does-not-have-to-exist' + # PthDistributions expects all locations to be normalized + location = pkg_resources.normalize_path(location) pth = PthDistributions('does-not_exist', [location, ]) - self.assertTrue(not pth.dirty) + assert not pth.dirty pth.add(PRDistribution(location)) - self.assertTrue(not pth.dirty) + assert not pth.dirty -class TestUserInstallTest(unittest.TestCase): - - def setUp(self): - self.dir = tempfile.mkdtemp() - setup = os.path.join(self.dir, 'setup.py') - f = open(setup, 'w') +@pytest.yield_fixture +def setup_context(tmpdir): + with (tmpdir/'setup.py').open('w') as f: f.write(SETUP_PY) - f.close() - self.old_cwd = os.getcwd() - os.chdir(self.dir) + with tmpdir.as_cwd(): + yield tmpdir - self.old_enable_site = site.ENABLE_USER_SITE - self.old_file = easy_install_pkg.__file__ - self.old_base = site.USER_BASE - site.USER_BASE = tempfile.mkdtemp() - self.old_site = site.USER_SITE - site.USER_SITE = tempfile.mkdtemp() - easy_install_pkg.__file__ = site.USER_SITE - def tearDown(self): - os.chdir(self.old_cwd) - shutil.rmtree(self.dir) - - shutil.rmtree(site.USER_BASE) - shutil.rmtree(site.USER_SITE) - site.USER_BASE = self.old_base - site.USER_SITE = self.old_site - site.ENABLE_USER_SITE = self.old_enable_site - easy_install_pkg.__file__ = self.old_file +@pytest.mark.usefixtures("user_override") +@pytest.mark.usefixtures("setup_context") +class TestUserInstallTest: + @mock.patch('setuptools.command.easy_install.__file__', None) def test_user_install_implied(self): + easy_install_pkg.__file__ = site.USER_SITE site.ENABLE_USER_SITE = True # disabled sometimes #XXX: replace with something meaningfull dist = Distribution() dist.script_name = 'setup.py' - cmd = easy_install(dist) + cmd = ei.easy_install(dist) cmd.args = ['py'] cmd.ensure_finalized() - self.assertTrue(cmd.user, 'user should be implied') + assert cmd.user, 'user should be implied' def test_multiproc_atexit(self): try: @@ -178,10 +182,10 @@ class TestUserInstallTest(unittest.TestCase): #XXX: replace with something meaningfull dist = Distribution() dist.script_name = 'setup.py' - cmd = easy_install(dist) + cmd = ei.easy_install(dist) cmd.args = ['py'] cmd.initialize_options() - self.assertFalse(cmd.user, 'NOT user should be implied') + assert not cmd.user, 'NOT user should be implied' def test_local_index(self): # make sure the local index is used @@ -190,11 +194,8 @@ class TestUserInstallTest(unittest.TestCase): new_location = tempfile.mkdtemp() target = tempfile.mkdtemp() egg_file = os.path.join(new_location, 'foo-1.0.egg-info') - f = open(egg_file, 'w') - try: + with open(egg_file, 'w') as f: f.write('Name: foo\n') - finally: - f.close() sys.path.append(target) old_ppath = os.environ.get('PYTHONPATH') @@ -202,14 +203,15 @@ class 
TestUserInstallTest(unittest.TestCase): try: dist = Distribution() dist.script_name = 'setup.py' - cmd = easy_install(dist) + cmd = ei.easy_install(dist) cmd.install_dir = target cmd.args = ['foo'] cmd.ensure_finalized() cmd.local_index.scan([new_location]) res = cmd.easy_install('foo') - self.assertEqual(os.path.realpath(res.location), - os.path.realpath(new_location)) + actual = os.path.normcase(os.path.realpath(res.location)) + expected = os.path.normcase(os.path.realpath(new_location)) + assert actual == expected finally: sys.path.remove(target) for basedir in [new_location, target, ]: @@ -224,6 +226,25 @@ class TestUserInstallTest(unittest.TestCase): else: del os.environ['PYTHONPATH'] + @contextlib.contextmanager + def user_install_setup_context(self, *args, **kwargs): + """ + Wrap sandbox.setup_context to patch easy_install in that context to + appear as user-installed. + """ + with self.orig_context(*args, **kwargs): + import setuptools.command.easy_install as ei + ei.__file__ = site.USER_SITE + yield + + def patched_setup_context(self): + self.orig_context = sandbox.setup_context + + return mock.patch( + 'setuptools.sandbox.setup_context', + self.user_install_setup_context, + ) + def test_setup_requires(self): """Regression test for Distribute issue #318 @@ -232,18 +253,37 @@ class TestUserInstallTest(unittest.TestCase): SandboxViolation. """ - test_pkg = create_setup_requires_package(self.dir) + test_pkg = create_setup_requires_package(os.getcwd()) test_setup_py = os.path.join(test_pkg, 'setup.py') try: - with quiet_context(): - with reset_setup_stop_context(): + with contexts.quiet(): + with self.patched_setup_context(): run_setup(test_setup_py, ['install']) - except SandboxViolation: - self.fail('Installation caused SandboxViolation') + except IndexError: + # Test fails in some cases due to bugs in Python + # See https://bitbucket.org/pypa/setuptools/issue/201 + pass -class TestSetupRequires(unittest.TestCase): +@pytest.yield_fixture +def distutils_package(): + distutils_setup_py = SETUP_PY.replace( + 'from setuptools import setup', + 'from distutils.core import setup', + ) + with contexts.tempdir(cd=os.chdir): + with open('setup.py', 'w') as f: + f.write(distutils_setup_py) + yield + + +class TestDistutilsPackage: + def test_bdist_egg_available_on_distutils_pkg(self, distutils_package): + run_setup('setup.py', ['bdist_egg']) + + +class TestSetupRequires: def test_setup_requires_honors_fetch_params(self): """ @@ -260,25 +300,27 @@ class TestSetupRequires(unittest.TestCase): # Some platforms (Jython) don't find a port to which to bind, # so skip this test for them. return - with quiet_context(): + with contexts.quiet(): # create an sdist that has a build-time dependency. with TestSetupRequires.create_sdist() as dist_file: - with tempdir_context() as temp_install_dir: - with environment_context(PYTHONPATH=temp_install_dir): - ei_params = ['--index-url', p_index.url, + with contexts.tempdir() as temp_install_dir: + with contexts.environment(PYTHONPATH=temp_install_dir): + ei_params = [ + '--index-url', p_index.url, '--allow-hosts', p_index_loc, - '--exclude-scripts', '--install-dir', temp_install_dir, - dist_file] - with reset_setup_stop_context(): - with argv_context(['easy_install']): - # attempt to install the dist. It should fail because - # it doesn't exist. - self.assertRaises(SystemExit, - easy_install_pkg.main, ei_params) + '--exclude-scripts', + '--install-dir', temp_install_dir, + dist_file, + ] + with contexts.argv(['easy_install']): + # attempt to install the dist. 
It should fail because + # it doesn't exist. + with pytest.raises(SystemExit): + easy_install_pkg.main(ei_params) # there should have been two or three requests to the server # (three happens on Python 3.3a) - self.assertTrue(2 <= len(p_index.requests) <= 3) - self.assertEqual(p_index.requests[0].path, '/does-not-exist/') + assert 2 <= len(p_index.requests) <= 3 + assert p_index.requests[0].path == '/does-not-exist/' @staticmethod @contextlib.contextmanager @@ -287,18 +329,17 @@ class TestSetupRequires(unittest.TestCase): Return an sdist with a setup_requires dependency (of something that doesn't exist) """ - with tempdir_context() as dir: + with contexts.tempdir() as dir: dist_path = os.path.join(dir, 'setuptools-test-fetcher-1.0.tar.gz') - make_trivial_sdist( - dist_path, - textwrap.dedent(""" - import setuptools - setuptools.setup( - name="setuptools-test-fetcher", - version="1.0", - setup_requires = ['does-not-exist'], - ) - """).lstrip()) + script = DALS(""" + import setuptools + setuptools.setup( + name="setuptools-test-fetcher", + version="1.0", + setup_requires = ['does-not-exist'], + ) + """) + make_trivial_sdist(dist_path, script) yield dist_path def test_setup_requires_overrides_version_conflict(self): @@ -316,22 +357,17 @@ class TestSetupRequires(unittest.TestCase): working_set.add(fake_dist) try: - with tempdir_context() as temp_dir: + with contexts.tempdir() as temp_dir: test_pkg = create_setup_requires_package(temp_dir) test_setup_py = os.path.join(test_pkg, 'setup.py') - with quiet_context() as (stdout, stderr): - with reset_setup_stop_context(): - try: - # Don't even need to install the package, just - # running the setup.py at all is sufficient - run_setup(test_setup_py, ['--name']) - except VersionConflict: - self.fail('Installing setup.py requirements ' - 'caused a VersionConflict') + with contexts.quiet() as (stdout, stderr): + # Don't even need to install the package, just + # running the setup.py at all is sufficient + run_setup(test_setup_py, ['--name']) lines = stdout.readlines() - self.assertTrue(len(lines) > 0) - self.assertTrue(lines[-1].strip(), 'test_pkg') + assert len(lines) > 0 + assert lines[-1].strip(), 'test_pkg' finally: pkg_resources.__setstate__(pr_state) @@ -352,17 +388,16 @@ def create_setup_requires_package(path): test_setup_py = os.path.join(test_pkg, 'setup.py') os.mkdir(test_pkg) - f = open(test_setup_py, 'w') - f.write(textwrap.dedent("""\ - import setuptools - setuptools.setup(**%r) - """ % test_setup_attrs)) - f.close() + with open(test_setup_py, 'w') as f: + f.write(DALS(""" + import setuptools + setuptools.setup(**%r) + """ % test_setup_attrs)) foobar_path = os.path.join(path, 'foobar-0.1.tar.gz') make_trivial_sdist( foobar_path, - textwrap.dedent("""\ + DALS(""" import setuptools setuptools.setup( name='foobar', @@ -386,71 +421,127 @@ def make_trivial_sdist(dist_path, setup_py): MemFile = StringIO setup_py_bytes = MemFile(setup_py.encode('utf-8')) setup_py_file.size = len(setup_py_bytes.getvalue()) - dist = tarfile.open(dist_path, 'w:gz') - try: + with tarfile_open(dist_path, 'w:gz') as dist: dist.addfile(setup_py_file, fileobj=setup_py_bytes) - finally: - dist.close() -@contextlib.contextmanager -def tempdir_context(cd=lambda dir:None): - temp_dir = tempfile.mkdtemp() - orig_dir = os.getcwd() - try: - cd(temp_dir) - yield temp_dir - finally: - cd(orig_dir) - shutil.rmtree(temp_dir) +class TestScriptHeader: + non_ascii_exe = '/Users/José/bin/python' + exe_with_spaces = r'C:\Program Files\Python33\python.exe' -@contextlib.contextmanager -def 
environment_context(**updates): - old_env = os.environ.copy() - os.environ.update(updates) - try: - yield - finally: - for key in updates: - del os.environ[key] - os.environ.update(old_env) + @pytest.mark.skipif( + sys.platform.startswith('java') and ei.is_sh(sys.executable), + reason="Test cannot run under java when executable is sh" + ) + def test_get_script_header(self): + expected = '#!%s\n' % ei.nt_quote_arg(os.path.normpath(sys.executable)) + actual = ei.ScriptWriter.get_script_header('#!/usr/local/bin/python') + assert actual == expected -@contextlib.contextmanager -def argv_context(repl): - old_argv = sys.argv[:] - sys.argv[:] = repl - yield - sys.argv[:] = old_argv + expected = '#!%s -x\n' % ei.nt_quote_arg(os.path.normpath + (sys.executable)) + actual = ei.ScriptWriter.get_script_header('#!/usr/bin/python -x') + assert actual == expected -@contextlib.contextmanager -def reset_setup_stop_context(): - """ - When the setuptools tests are run using setup.py test, and then - one wants to invoke another setup() command (such as easy_install) - within those tests, it's necessary to reset the global variable - in distutils.core so that the setup() command will run naturally. - """ - setup_stop_after = distutils.core._setup_stop_after - distutils.core._setup_stop_after = None - yield - distutils.core._setup_stop_after = setup_stop_after + actual = ei.ScriptWriter.get_script_header('#!/usr/bin/python', + executable=self.non_ascii_exe) + expected = '#!%s -x\n' % self.non_ascii_exe + assert actual == expected + + actual = ei.ScriptWriter.get_script_header('#!/usr/bin/python', + executable='"'+self.exe_with_spaces+'"') + expected = '#!"%s"\n' % self.exe_with_spaces + assert actual == expected + + @pytest.mark.xfail( + compat.PY3 and os.environ.get("LC_CTYPE") in ("C", "POSIX"), + reason="Test fails in this locale on Python 3" + ) + @mock.patch.dict(sys.modules, java=mock.Mock(lang=mock.Mock(System= + mock.Mock(getProperty=mock.Mock(return_value=""))))) + @mock.patch('sys.platform', 'java1.5.0_13') + def test_get_script_header_jython_workaround(self, tmpdir): + # Create a mock sys.executable that uses a shebang line + header = DALS(""" + #!/usr/bin/python + # -*- coding: utf-8 -*- + """) + exe = tmpdir / 'exe.py' + with exe.open('w') as f: + f.write(header) + exe = str(exe) + + header = ei.ScriptWriter.get_script_header('#!/usr/local/bin/python', + executable=exe) + assert header == '#!/usr/bin/env %s\n' % exe + + expect_out = 'stdout' if sys.version_info < (2,7) else 'stderr' + + with contexts.quiet() as (stdout, stderr): + # When options are included, generate a broken shebang line + # with a warning emitted + candidate = ei.ScriptWriter.get_script_header('#!/usr/bin/python -x', + executable=exe) + assert candidate == '#!%s -x\n' % exe + output = locals()[expect_out] + assert 'Unable to adapt shebang line' in output.getvalue() + + with contexts.quiet() as (stdout, stderr): + candidate = ei.ScriptWriter.get_script_header('#!/usr/bin/python', + executable=self.non_ascii_exe) + assert candidate == '#!%s -x\n' % self.non_ascii_exe + output = locals()[expect_out] + assert 'Unable to adapt shebang line' in output.getvalue() -@contextlib.contextmanager -def quiet_context(): - """ - Redirect stdout/stderr to StringIO objects to prevent console output from - distutils commands. - """ +class TestCommandSpec: + def test_custom_launch_command(self): + """ + Show how a custom CommandSpec could be used to specify a #! executable + which takes parameters. 
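+        For instance (illustrative, not asserted below), a spec with an
+        option such as CommandSpec(['/usr/bin/env', 'python3', '-O'])
+        would be expected to render as '#!/usr/bin/env python3 -O\n'.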
+ """ + cmd = ei.CommandSpec(['/usr/bin/env', 'python3']) + assert cmd.as_header() == '#!/usr/bin/env python3\n' - old_stdout = sys.stdout - old_stderr = sys.stderr - new_stdout = sys.stdout = StringIO() - new_stderr = sys.stderr = StringIO() - try: - yield new_stdout, new_stderr - finally: - new_stdout.seek(0) - new_stderr.seek(0) - sys.stdout = old_stdout - sys.stderr = old_stderr + def test_from_param_for_CommandSpec_is_passthrough(self): + """ + from_param should return an instance of a CommandSpec + """ + cmd = ei.CommandSpec(['python']) + cmd_new = ei.CommandSpec.from_param(cmd) + assert cmd is cmd_new + + def test_from_environment_with_spaces_in_executable(self): + with mock.patch('sys.executable', TestScriptHeader.exe_with_spaces): + cmd = ei.CommandSpec.from_environment() + assert len(cmd) == 1 + assert cmd.as_header().startswith('#!"') + + def test_from_simple_string_uses_shlex(self): + """ + In order to support `executable = /usr/bin/env my-python`, make sure + from_param invokes shlex on that input. + """ + cmd = ei.CommandSpec.from_param('/usr/bin/env my-python') + assert len(cmd) == 2 + assert '"' not in cmd.as_header() + + def test_sys_executable(self): + """ + CommandSpec.from_string(sys.executable) should contain just that param. + """ + writer = ei.ScriptWriter.best() + cmd = writer.command_spec_class.from_string(sys.executable) + assert len(cmd) == 1 + assert cmd[0] == sys.executable + + +class TestWindowsScriptWriter: + def test_header(self): + hdr = ei.WindowsScriptWriter.get_script_header('') + assert hdr.startswith('#!') + assert hdr.endswith('\n') + hdr = hdr.lstrip('#!') + hdr = hdr.rstrip('\n') + # header should not start with an escaped quote + assert not hdr.startswith('\\"') diff --git a/awx/lib/site-packages/setuptools/tests/test_egg_info.py b/awx/lib/site-packages/setuptools/tests/test_egg_info.py index 278543662f..a1caf9fd30 100644 --- a/awx/lib/site-packages/setuptools/tests/test_egg_info.py +++ b/awx/lib/site-packages/setuptools/tests/test_egg_info.py @@ -1,173 +1,98 @@ - import os -import sys -import tempfile -import shutil -import unittest +import stat -import pkg_resources -import warnings -from setuptools.command import egg_info -from setuptools import svn_utils -from setuptools.tests import environment, test_svn -from setuptools.tests.py26compat import skipIf +import pytest -ENTRIES_V10 = pkg_resources.resource_string(__name__, 'entries-v10') -"An entries file generated with svn 1.6.17 against the legacy Setuptools repo" +from . import environment +from .textwrap import DALS +from . 
import contexts -class TestEggInfo(unittest.TestCase): +class TestEggInfo: - def setUp(self): - self.test_dir = tempfile.mkdtemp() - os.mkdir(os.path.join(self.test_dir, '.svn')) + setup_script = DALS(""" + from setuptools import setup - self.old_cwd = os.getcwd() - os.chdir(self.test_dir) + setup( + name='foo', + py_modules=['hello'], + entry_points={'console_scripts': ['hi = hello.run']}, + zip_safe=False, + ) + """) - def tearDown(self): - os.chdir(self.old_cwd) - shutil.rmtree(self.test_dir) + def _create_project(self): + with open('setup.py', 'w') as f: + f.write(self.setup_script) - def _write_entries(self, entries): - fn = os.path.join(self.test_dir, '.svn', 'entries') - entries_f = open(fn, 'wb') - entries_f.write(entries) - entries_f.close() - - @skipIf(not test_svn._svn_check, "No SVN to text, in the first place") - def test_version_10_format(self): - """ - """ - #keeping this set for 1.6 is a good check on the get_svn_revision - #to ensure I return using svnversion what would had been returned - version_str = svn_utils.SvnInfo.get_svn_version() - version = [int(x) for x in version_str.split('.')[:2]] - if version != [1, 6]: - if hasattr(self, 'skipTest'): - self.skipTest('') - else: - sys.stderr.write('\n Skipping due to SVN Version\n') - return + with open('hello.py', 'w') as f: + f.write(DALS(""" + def run(): + print('hello') + """)) - self._write_entries(ENTRIES_V10) - rev = egg_info.egg_info.get_svn_revision() - self.assertEqual(rev, '89000') + @pytest.yield_fixture + def env(self): + class Environment(str): pass - def test_version_10_format_legacy_parser(self): - """ - """ - path_variable = None - for env in os.environ: - if env.lower() == 'path': - path_variable = env + with contexts.tempdir(prefix='setuptools-test.') as env_dir: + env = Environment(env_dir) + os.chmod(env_dir, stat.S_IRWXU) + subs = 'home', 'lib', 'scripts', 'data', 'egg-base' + env.paths = dict( + (dirname, os.path.join(env_dir, dirname)) + for dirname in subs + ) + list(map(os.mkdir, env.paths.values())) + config = os.path.join(env.paths['home'], '.pydistutils.cfg') + with open(config, 'w') as f: + f.write(DALS(""" + [egg_info] + egg-base = %(egg-base)s + """ % env.paths + )) + yield env - if path_variable: - old_path = os.environ[path_variable] - os.environ[path_variable] = '' - #catch_warnings not available until py26 - warning_filters = warnings.filters - warnings.filters = warning_filters[:] - try: - warnings.simplefilter("ignore", DeprecationWarning) - self._write_entries(ENTRIES_V10) - rev = egg_info.egg_info.get_svn_revision() - finally: - #restore the warning filters - warnings.filters = warning_filters - #restore the os path - if path_variable: - os.environ[path_variable] = old_path + def test_egg_base_installed_egg_info(self, tmpdir_cwd, env): + self._create_project() - self.assertEqual(rev, '89000') - -DUMMY_SOURCE_TXT = """CHANGES.txt -CONTRIBUTORS.txt -HISTORY.txt -LICENSE -MANIFEST.in -README.txt -setup.py -dummy/__init__.py -dummy/test.txt -dummy.egg-info/PKG-INFO -dummy.egg-info/SOURCES.txt -dummy.egg-info/dependency_links.txt -dummy.egg-info/top_level.txt""" - - -class TestSvnDummy(environment.ZippedEnvironment): - - def setUp(self): - version = svn_utils.SvnInfo.get_svn_version() - if not version: # None or Empty - return None - - self.base_version = tuple([int(x) for x in version.split('.')][:2]) - - if not self.base_version: - raise ValueError('No SVN tools installed') - elif self.base_version < (1, 3): - raise ValueError('Insufficient SVN Version %s' % version) - elif 
self.base_version >= (1, 9): - #trying the latest version - self.base_version = (1, 8) - - self.dataname = "dummy%i%i" % self.base_version - self.datafile = os.path.join('setuptools', 'tests', - 'svn_data', self.dataname + ".zip") - super(TestSvnDummy, self).setUp() - - @skipIf(not test_svn._svn_check, "No SVN to text, in the first place") - def test_sources(self): - code, data = environment.run_setup_py(["sdist"], - pypath=self.old_cwd, - data_stream=1) + environ = os.environ.copy().update( + HOME=env.paths['home'], + ) + cmd = [ + 'install', + '--home', env.paths['home'], + '--install-lib', env.paths['lib'], + '--install-scripts', env.paths['scripts'], + '--install-data', env.paths['data'], + ] + code, data = environment.run_setup_py( + cmd=cmd, + pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]), + data_stream=1, + env=environ, + ) if code: raise AssertionError(data) - sources = os.path.join('dummy.egg-info', 'SOURCES.txt') - infile = open(sources, 'r') - try: - read_contents = infile.read() - finally: - infile.close() - del infile + actual = self._find_egg_info_files(env.paths['lib']) - self.assertEqual(DUMMY_SOURCE_TXT, read_contents) + expected = [ + 'PKG-INFO', + 'SOURCES.txt', + 'dependency_links.txt', + 'entry_points.txt', + 'not-zip-safe', + 'top_level.txt', + ] + assert sorted(actual) == expected - return data - - -class TestSvnDummyLegacy(environment.ZippedEnvironment): - - def setUp(self): - self.base_version = (1, 6) - self.dataname = "dummy%i%i" % self.base_version - self.datafile = os.path.join('setuptools', 'tests', - 'svn_data', self.dataname + ".zip") - super(TestSvnDummyLegacy, self).setUp() - - def test_sources(self): - code, data = environment.run_setup_py(["sdist"], - pypath=self.old_cwd, - path="", - data_stream=1) - if code: - raise AssertionError(data) - - sources = os.path.join('dummy.egg-info', 'SOURCES.txt') - infile = open(sources, 'r') - try: - read_contents = infile.read() - finally: - infile.close() - del infile - - self.assertEqual(DUMMY_SOURCE_TXT, read_contents) - - return data - - -def test_suite(): - return unittest.defaultTestLoader.loadTestsFromName(__name__) + def _find_egg_info_files(self, root): + results = ( + filenames + for dirpath, dirnames, filenames in os.walk(root) + if os.path.basename(dirpath) == 'EGG-INFO' + ) + # expect exactly one result + result, = results + return result diff --git a/awx/lib/site-packages/setuptools/tests/test_find_packages.py b/awx/lib/site-packages/setuptools/tests/test_find_packages.py new file mode 100644 index 0000000000..06a7c02e46 --- /dev/null +++ b/awx/lib/site-packages/setuptools/tests/test_find_packages.py @@ -0,0 +1,170 @@ +"""Tests for setuptools.find_packages().""" +import os +import sys +import shutil +import tempfile +import platform + +import pytest + +import setuptools +from setuptools import find_packages + +find_420_packages = setuptools.PEP420PackageFinder.find + +# modeled after CPython's test.support.can_symlink +def can_symlink(): + TESTFN = tempfile.mktemp() + symlink_path = TESTFN + "can_symlink" + try: + os.symlink(TESTFN, symlink_path) + can = True + except (OSError, NotImplementedError, AttributeError): + can = False + else: + os.remove(symlink_path) + globals().update(can_symlink=lambda: can) + return can + +def has_symlink(): + bad_symlink = ( + # Windows symlink directory detection is broken on Python 3.2 + platform.system() == 'Windows' and sys.version_info[:2] == (3,2) + ) + return can_symlink() and not bad_symlink + +class TestFindPackages: + + def setup_method(self, 
method): + self.dist_dir = tempfile.mkdtemp() + self._make_pkg_structure() + + def teardown_method(self, method): + shutil.rmtree(self.dist_dir) + + def _make_pkg_structure(self): + """Make basic package structure. + + dist/ + docs/ + conf.py + pkg/ + __pycache__/ + nspkg/ + mod.py + subpkg/ + assets/ + asset + __init__.py + setup.py + + """ + self.docs_dir = self._mkdir('docs', self.dist_dir) + self._touch('conf.py', self.docs_dir) + self.pkg_dir = self._mkdir('pkg', self.dist_dir) + self._mkdir('__pycache__', self.pkg_dir) + self.ns_pkg_dir = self._mkdir('nspkg', self.pkg_dir) + self._touch('mod.py', self.ns_pkg_dir) + self.sub_pkg_dir = self._mkdir('subpkg', self.pkg_dir) + self.asset_dir = self._mkdir('assets', self.sub_pkg_dir) + self._touch('asset', self.asset_dir) + self._touch('__init__.py', self.sub_pkg_dir) + self._touch('setup.py', self.dist_dir) + + def _mkdir(self, path, parent_dir=None): + if parent_dir: + path = os.path.join(parent_dir, path) + os.mkdir(path) + return path + + def _touch(self, path, dir_=None): + if dir_: + path = os.path.join(dir_, path) + fp = open(path, 'w') + fp.close() + return path + + def test_regular_package(self): + self._touch('__init__.py', self.pkg_dir) + packages = find_packages(self.dist_dir) + assert packages == ['pkg', 'pkg.subpkg'] + + def test_exclude(self): + self._touch('__init__.py', self.pkg_dir) + packages = find_packages(self.dist_dir, exclude=('pkg.*',)) + assert packages == ['pkg'] + + def test_include_excludes_other(self): + """ + If include is specified, other packages should be excluded. + """ + self._touch('__init__.py', self.pkg_dir) + alt_dir = self._mkdir('other_pkg', self.dist_dir) + self._touch('__init__.py', alt_dir) + packages = find_packages(self.dist_dir, include=['other_pkg']) + assert packages == ['other_pkg'] + + def test_dir_with_dot_is_skipped(self): + shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets')) + data_dir = self._mkdir('some.data', self.pkg_dir) + self._touch('__init__.py', data_dir) + self._touch('file.dat', data_dir) + packages = find_packages(self.dist_dir) + assert 'pkg.some.data' not in packages + + def test_dir_with_packages_in_subdir_is_excluded(self): + """ + Ensure that a package in a non-package such as build/pkg/__init__.py + is excluded. + """ + build_dir = self._mkdir('build', self.dist_dir) + build_pkg_dir = self._mkdir('pkg', build_dir) + self._touch('__init__.py', build_pkg_dir) + packages = find_packages(self.dist_dir) + assert 'build.pkg' not in packages + + @pytest.mark.skipif(not has_symlink(), reason='Symlink support required') + def test_symlinked_packages_are_included(self): + """ + A symbolically-linked directory should be treated like any other + directory when matched as a package. + + Create a link from lpkg -> pkg. 
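+        (The relative link dist_dir/lpkg -> pkg resolves to the real
+        package directory beside it.)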
+ """ + self._touch('__init__.py', self.pkg_dir) + linked_pkg = os.path.join(self.dist_dir, 'lpkg') + os.symlink('pkg', linked_pkg) + assert os.path.isdir(linked_pkg) + packages = find_packages(self.dist_dir) + assert 'lpkg' in packages + + def _assert_packages(self, actual, expected): + assert set(actual) == set(expected) + + def test_pep420_ns_package(self): + packages = find_420_packages( + self.dist_dir, include=['pkg*'], exclude=['pkg.subpkg.assets']) + self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg']) + + def test_pep420_ns_package_no_includes(self): + packages = find_420_packages( + self.dist_dir, exclude=['pkg.subpkg.assets']) + self._assert_packages(packages, ['docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg']) + + def test_pep420_ns_package_no_includes_or_excludes(self): + packages = find_420_packages(self.dist_dir) + expected = [ + 'docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg', 'pkg.subpkg.assets'] + self._assert_packages(packages, expected) + + def test_regular_package_with_nested_pep420_ns_packages(self): + self._touch('__init__.py', self.pkg_dir) + packages = find_420_packages( + self.dist_dir, exclude=['docs', 'pkg.subpkg.assets']) + self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg']) + + def test_pep420_ns_package_no_non_package_dirs(self): + shutil.rmtree(self.docs_dir) + shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets')) + packages = find_420_packages(self.dist_dir) + self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg']) diff --git a/awx/lib/site-packages/setuptools/tests/test_integration.py b/awx/lib/site-packages/setuptools/tests/test_integration.py new file mode 100644 index 0000000000..90bb43136c --- /dev/null +++ b/awx/lib/site-packages/setuptools/tests/test_integration.py @@ -0,0 +1,99 @@ +"""Run some integration tests. + +Try to install a few packages. +""" + +import glob +import os +import sys + +import pytest + +from setuptools.command.easy_install import easy_install +from setuptools.command import easy_install as easy_install_pkg +from setuptools.dist import Distribution +from setuptools.compat import urlopen + + +def setup_module(module): + packages = 'stevedore', 'virtualenvwrapper', 'pbr', 'novaclient' + for pkg in packages: + try: + __import__(pkg) + tmpl = "Integration tests cannot run when {pkg} is installed" + pytest.skip(tmpl.format(**locals())) + except ImportError: + pass + + try: + urlopen('https://pypi.python.org/pypi') + except Exception as exc: + pytest.skip(reason=str(exc)) + + +@pytest.fixture +def install_context(request, tmpdir, monkeypatch): + """Fixture to set up temporary installation directory. + """ + # Save old values so we can restore them. + new_cwd = tmpdir.mkdir('cwd') + user_base = tmpdir.mkdir('user_base') + user_site = tmpdir.mkdir('user_site') + install_dir = tmpdir.mkdir('install_dir') + + def fin(): + # undo the monkeypatch, particularly needed under + # windows because of kept handle on cwd + monkeypatch.undo() + new_cwd.remove() + user_base.remove() + user_site.remove() + install_dir.remove() + request.addfinalizer(fin) + + # Change the environment and site settings to control where the + # files are installed and ensure we do not overwrite anything. 
+ monkeypatch.chdir(new_cwd) + monkeypatch.setattr(easy_install_pkg, '__file__', user_site.strpath) + monkeypatch.setattr('site.USER_BASE', user_base.strpath) + monkeypatch.setattr('site.USER_SITE', user_site.strpath) + monkeypatch.setattr('sys.path', sys.path + [install_dir.strpath]) + monkeypatch.setenv('PYTHONPATH', os.path.pathsep.join(sys.path)) + + # Set up the command for performing the installation. + dist = Distribution() + cmd = easy_install(dist) + cmd.install_dir = install_dir.strpath + return cmd + + +def _install_one(requirement, cmd, pkgname, modulename): + cmd.args = [requirement] + cmd.ensure_finalized() + cmd.run() + target = cmd.install_dir + dest_path = glob.glob(os.path.join(target, pkgname + '*.egg')) + assert dest_path + assert os.path.exists(os.path.join(dest_path[0], pkgname, modulename)) + + +def test_stevedore(install_context): + _install_one('stevedore', install_context, + 'stevedore', 'extension.py') + + +@pytest.mark.xfail +def test_virtualenvwrapper(install_context): + _install_one('virtualenvwrapper', install_context, + 'virtualenvwrapper', 'hook_loader.py') + + +def test_pbr(install_context): + _install_one('pbr', install_context, + 'pbr', 'core.py') + + +@pytest.mark.xfail +def test_python_novaclient(install_context): + _install_one('python-novaclient', install_context, + 'novaclient', 'base.py') diff --git a/awx/lib/site-packages/setuptools/tests/test_markerlib.py b/awx/lib/site-packages/setuptools/tests/test_markerlib.py index dae71cba46..8197b49dc4 100644 --- a/awx/lib/site-packages/setuptools/tests/test_markerlib.py +++ b/awx/lib/site-packages/setuptools/tests/test_markerlib.py @@ -1,48 +1,43 @@ import os -import unittest -from setuptools.tests.py26compat import skipIf -try: - import ast -except ImportError: - pass +import pytest -class TestMarkerlib(unittest.TestCase): - @skipIf('ast' not in globals(), - "ast not available (Python < 2.6?)") +class TestMarkerlib: + + @pytest.mark.importorskip('ast') def test_markers(self): from _markerlib import interpret, default_environment, compile - + os_name = os.name - - self.assertTrue(interpret("")) - - self.assertTrue(interpret("os.name != 'buuuu'")) - self.assertTrue(interpret("os_name != 'buuuu'")) - self.assertTrue(interpret("python_version > '1.0'")) - self.assertTrue(interpret("python_version < '5.0'")) - self.assertTrue(interpret("python_version <= '5.0'")) - self.assertTrue(interpret("python_version >= '1.0'")) - self.assertTrue(interpret("'%s' in os.name" % os_name)) - self.assertTrue(interpret("'%s' in os_name" % os_name)) - self.assertTrue(interpret("'buuuu' not in os.name")) - - self.assertFalse(interpret("os.name == 'buuuu'")) - self.assertFalse(interpret("os_name == 'buuuu'")) - self.assertFalse(interpret("python_version < '1.0'")) - self.assertFalse(interpret("python_version > '5.0'")) - self.assertFalse(interpret("python_version >= '5.0'")) - self.assertFalse(interpret("python_version <= '1.0'")) - self.assertFalse(interpret("'%s' not in os.name" % os_name)) - self.assertFalse(interpret("'buuuu' in os.name and python_version >= '5.0'")) - self.assertFalse(interpret("'buuuu' in os_name and python_version >= '5.0'")) - + + assert interpret("") + + assert interpret("os.name != 'buuuu'") + assert interpret("os_name != 'buuuu'") + assert interpret("python_version > '1.0'") + assert interpret("python_version < '5.0'") + assert interpret("python_version <= '5.0'") + assert interpret("python_version >= '1.0'") + assert interpret("'%s' in os.name" % os_name) + assert interpret("'%s' in os_name" % 
os_name) + assert interpret("'buuuu' not in os.name") + + assert not interpret("os.name == 'buuuu'") + assert not interpret("os_name == 'buuuu'") + assert not interpret("python_version < '1.0'") + assert not interpret("python_version > '5.0'") + assert not interpret("python_version >= '5.0'") + assert not interpret("python_version <= '1.0'") + assert not interpret("'%s' not in os.name" % os_name) + assert not interpret("'buuuu' in os.name and python_version >= '5.0'") + assert not interpret("'buuuu' in os_name and python_version >= '5.0'") + environment = default_environment() environment['extra'] = 'test' - self.assertTrue(interpret("extra == 'test'", environment)) - self.assertFalse(interpret("extra == 'doc'", environment)) - + assert interpret("extra == 'test'", environment) + assert not interpret("extra == 'doc'", environment) + def raises_nameError(): try: interpret("python.version == '42'") @@ -50,9 +45,9 @@ class TestMarkerlib(unittest.TestCase): pass else: raise Exception("Expected NameError") - + raises_nameError() - + def raises_syntaxError(): try: interpret("(x for x in (4,))") @@ -60,9 +55,9 @@ class TestMarkerlib(unittest.TestCase): pass else: raise Exception("Expected SyntaxError") - + raises_syntaxError() - + statement = "python_version == '5'" - self.assertEqual(compile(statement).__doc__, statement) - + assert compile(statement).__doc__ == statement + diff --git a/awx/lib/site-packages/setuptools/tests/test_msvc9compiler.py b/awx/lib/site-packages/setuptools/tests/test_msvc9compiler.py new file mode 100644 index 0000000000..09e0460c56 --- /dev/null +++ b/awx/lib/site-packages/setuptools/tests/test_msvc9compiler.py @@ -0,0 +1,179 @@ +""" +Tests for msvc9compiler. +""" + +import os +import contextlib +import distutils.errors + +import pytest +try: + from unittest import mock +except ImportError: + import mock + +from . import contexts + +# importing only setuptools should apply the patch +__import__('setuptools') + +pytest.importorskip("distutils.msvc9compiler") + + +def mock_reg(hkcu=None, hklm=None): + """ + Return a mock for distutils.msvc9compiler.Reg, patched + to mock out the functions that access the registry. + """ + + _winreg = getattr(distutils.msvc9compiler, '_winreg', None) + winreg = getattr(distutils.msvc9compiler, 'winreg', _winreg) + + hives = { + winreg.HKEY_CURRENT_USER: hkcu or {}, + winreg.HKEY_LOCAL_MACHINE: hklm or {}, + } + + @classmethod + def read_keys(cls, base, key): + """Return list of registry keys.""" + hive = hives.get(base, {}) + return [ + k.rpartition('\\')[2] + for k in hive if k.startswith(key.lower()) + ] + + @classmethod + def read_values(cls, base, key): + """Return dict of registry keys and values.""" + hive = hives.get(base, {}) + return dict( + (k.rpartition('\\')[2], hive[k]) + for k in hive if k.startswith(key.lower()) + ) + + return mock.patch.multiple(distutils.msvc9compiler.Reg, + read_keys=read_keys, read_values=read_values) + + +class TestModulePatch: + """ + Ensure that importing setuptools is sufficient to replace + the standard find_vcvarsall function with a version that + recognizes the "Visual C++ for Python" package. 
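+    (The replacement lives in setuptools.msvc9_support; it consults the
+    VCForPython registry keys below and falls back to the stock
+    distutils lookup when they are absent.)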
+ """ + + key_32 = r'software\microsoft\devdiv\vcforpython\9.0\installdir' + key_64 = r'software\wow6432node\microsoft\devdiv\vcforpython\9.0\installdir' + + def test_patched(self): + "Test the module is actually patched" + mod_name = distutils.msvc9compiler.find_vcvarsall.__module__ + assert mod_name == "setuptools.msvc9_support", "find_vcvarsall unpatched" + + def test_no_registry_entryies_means_nothing_found(self): + """ + No registry entries or environment variable should lead to an error + directing the user to download vcpython27. + """ + find_vcvarsall = distutils.msvc9compiler.find_vcvarsall + query_vcvarsall = distutils.msvc9compiler.query_vcvarsall + + with contexts.environment(VS90COMNTOOLS=None): + with mock_reg(): + assert find_vcvarsall(9.0) is None + + expected = distutils.errors.DistutilsPlatformError + with pytest.raises(expected) as exc: + query_vcvarsall(9.0) + assert 'aka.ms/vcpython27' in str(exc) + + @pytest.yield_fixture + def user_preferred_setting(self): + """ + Set up environment with different install dirs for user vs. system + and yield the user_install_dir for the expected result. + """ + with self.mock_install_dir() as user_install_dir: + with self.mock_install_dir() as system_install_dir: + reg = mock_reg( + hkcu={ + self.key_32: user_install_dir, + }, + hklm={ + self.key_32: system_install_dir, + self.key_64: system_install_dir, + }, + ) + with reg: + yield user_install_dir + + def test_prefer_current_user(self, user_preferred_setting): + """ + Ensure user's settings are preferred. + """ + result = distutils.msvc9compiler.find_vcvarsall(9.0) + expected = os.path.join(user_preferred_setting, 'vcvarsall.bat') + assert expected == result + + @pytest.yield_fixture + def local_machine_setting(self): + """ + Set up environment with only the system environment configured. + """ + with self.mock_install_dir() as system_install_dir: + reg = mock_reg( + hklm={ + self.key_32: system_install_dir, + }, + ) + with reg: + yield system_install_dir + + def test_local_machine_recognized(self, local_machine_setting): + """ + Ensure machine setting is honored if user settings are not present. + """ + result = distutils.msvc9compiler.find_vcvarsall(9.0) + expected = os.path.join(local_machine_setting, 'vcvarsall.bat') + assert expected == result + + @pytest.yield_fixture + def x64_preferred_setting(self): + """ + Set up environment with 64-bit and 32-bit system settings configured + and yield the canonical location. + """ + with self.mock_install_dir() as x32_dir: + with self.mock_install_dir() as x64_dir: + reg = mock_reg( + hklm={ + # This *should* only exist on 32-bit machines + self.key_32: x32_dir, + # This *should* only exist on 64-bit machines + self.key_64: x64_dir, + }, + ) + with reg: + yield x32_dir + + def test_ensure_64_bit_preferred(self, x64_preferred_setting): + """ + Ensure 64-bit system key is preferred. + """ + result = distutils.msvc9compiler.find_vcvarsall(9.0) + expected = os.path.join(x64_preferred_setting, 'vcvarsall.bat') + assert expected == result + + @staticmethod + @contextlib.contextmanager + def mock_install_dir(): + """ + Make a mock install dir in a unique location so that tests can + distinguish which dir was detected in a given scenario. 
+ """ + with contexts.tempdir() as result: + vcvarsall = os.path.join(result, 'vcvarsall.bat') + with open(vcvarsall, 'w'): + pass + yield result diff --git a/awx/lib/site-packages/setuptools/tests/test_packageindex.py b/awx/lib/site-packages/setuptools/tests/test_packageindex.py index 664566a36c..dcd90d6fe7 100644 --- a/awx/lib/site-packages/setuptools/tests/test_packageindex.py +++ b/awx/lib/site-packages/setuptools/tests/test_packageindex.py @@ -1,26 +1,24 @@ -"""Package Index Tests -""" import sys -import os -import unittest -import pkg_resources -from setuptools.compat import urllib2, httplib, HTTPError, unicode, pathname2url import distutils.errors + +from setuptools.compat import httplib, HTTPError, unicode, pathname2url + +import pkg_resources import setuptools.package_index from setuptools.tests.server import IndexServer -class TestPackageIndex(unittest.TestCase): + +class TestPackageIndex: def test_bad_url_bad_port(self): index = setuptools.package_index.PackageIndex() url = 'http://127.0.0.1:0/nonesuch/test_package_index' try: v = index.open_url(url) - except Exception: - v = sys.exc_info()[1] - self.assertTrue(url in str(v)) + except Exception as v: + assert url in str(v) else: - self.assertTrue(isinstance(v, HTTPError)) + assert isinstance(v, HTTPError) def test_bad_url_typo(self): # issue 16 @@ -33,11 +31,10 @@ class TestPackageIndex(unittest.TestCase): url = 'url:%20https://svn.plone.org/svn/collective/inquant.contentmirror.plone/trunk' try: v = index.open_url(url) - except Exception: - v = sys.exc_info()[1] - self.assertTrue(url in str(v)) + except Exception as v: + assert url in str(v) else: - self.assertTrue(isinstance(v, HTTPError)) + assert isinstance(v, HTTPError) def test_bad_url_bad_status_line(self): index = setuptools.package_index.PackageIndex( @@ -51,9 +48,8 @@ class TestPackageIndex(unittest.TestCase): url = 'http://example.com' try: v = index.open_url(url) - except Exception: - v = sys.exc_info()[1] - self.assertTrue('line' in str(v)) + except Exception as v: + assert 'line' in str(v) else: raise AssertionError('Should have raise here!') @@ -69,8 +65,7 @@ class TestPackageIndex(unittest.TestCase): url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk' try: index.open_url(url) - except distutils.errors.DistutilsError: - error = sys.exc_info()[1] + except distutils.errors.DistutilsError as error: msg = unicode(error) assert 'nonnumeric port' in msg or 'getaddrinfo failed' in msg or 'Name or service not known' in msg return @@ -94,7 +89,7 @@ class TestPackageIndex(unittest.TestCase): hosts=('www.example.com',) ) url = 'file:///tmp/test_package_index' - self.assertTrue(index.url_ok(url, True)) + assert index.url_ok(url, True) def test_links_priority(self): """ @@ -127,21 +122,30 @@ class TestPackageIndex(unittest.TestCase): server.stop() # the distribution has been found - self.assertTrue('foobar' in pi) + assert 'foobar' in pi # we have only one link, because links are compared without md5 - self.assertTrue(len(pi['foobar'])==1) + assert len(pi['foobar'])==1 # the link should be from the index - self.assertTrue('correct_md5' in pi['foobar'][0].location) + assert 'correct_md5' in pi['foobar'][0].location def test_parse_bdist_wininst(self): - self.assertEqual(setuptools.package_index.parse_bdist_wininst( - 'reportlab-2.5.win32-py2.4.exe'), ('reportlab-2.5', '2.4', 'win32')) - self.assertEqual(setuptools.package_index.parse_bdist_wininst( - 'reportlab-2.5.win32.exe'), ('reportlab-2.5', None, 'win32')) - 
self.assertEqual(setuptools.package_index.parse_bdist_wininst( - 'reportlab-2.5.win-amd64-py2.7.exe'), ('reportlab-2.5', '2.7', 'win-amd64')) - self.assertEqual(setuptools.package_index.parse_bdist_wininst( - 'reportlab-2.5.win-amd64.exe'), ('reportlab-2.5', None, 'win-amd64')) + parse = setuptools.package_index.parse_bdist_wininst + + actual = parse('reportlab-2.5.win32-py2.4.exe') + expected = 'reportlab-2.5', '2.4', 'win32' + assert actual == expected + + actual = parse('reportlab-2.5.win32.exe') + expected = 'reportlab-2.5', None, 'win32' + assert actual == expected + + actual = parse('reportlab-2.5.win-amd64-py2.7.exe') + expected = 'reportlab-2.5', '2.7', 'win-amd64' + assert actual == expected + + actual = parse('reportlab-2.5.win-amd64.exe') + expected = 'reportlab-2.5', None, 'win-amd64' + assert actual == expected def test__vcs_split_rev_from_url(self): """ @@ -149,55 +153,51 @@ class TestPackageIndex(unittest.TestCase): """ vsrfu = setuptools.package_index.PackageIndex._vcs_split_rev_from_url url, rev = vsrfu('https://example.com/bar@2995') - self.assertEqual(url, 'https://example.com/bar') - self.assertEqual(rev, '2995') + assert url == 'https://example.com/bar' + assert rev == '2995' - def test_local_index(self): + def test_local_index(self, tmpdir): """ local_open should be able to read an index from the file system. """ - f = open('index.html', 'w') - f.write('<div>content</div>') - f.close() - try: - url = 'file:' + pathname2url(os.getcwd()) + '/' - res = setuptools.package_index.local_open(url) - finally: - os.remove('index.html') + index_file = tmpdir / 'index.html' + with index_file.open('w') as f: + f.write('<div>content</div>') + url = 'file:' + pathname2url(str(tmpdir)) + '/' + res = setuptools.package_index.local_open(url) assert 'content' in res.read() -class TestContentCheckers(unittest.TestCase): +class TestContentCheckers: def test_md5(self): checker = setuptools.package_index.HashChecker.from_url( 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') checker.feed('You should probably not be using MD5'.encode('ascii')) - self.assertEqual(checker.hash.hexdigest(), - 'f12895fdffbd45007040d2e44df98478') - self.assertTrue(checker.is_valid()) + assert checker.hash.hexdigest() == 'f12895fdffbd45007040d2e44df98478' + assert checker.is_valid() def test_other_fragment(self): "Content checks should succeed silently if no hash is present" checker = setuptools.package_index.HashChecker.from_url( 'http://foo/bar#something%20completely%20different') checker.feed('anything'.encode('ascii')) - self.assertTrue(checker.is_valid()) + assert checker.is_valid() def test_blank_md5(self): "Content checks should succeed if a hash is empty" checker = setuptools.package_index.HashChecker.from_url( 'http://foo/bar#md5=') checker.feed('anything'.encode('ascii')) - self.assertTrue(checker.is_valid()) + assert checker.is_valid() def test_get_hash_name_md5(self): checker = setuptools.package_index.HashChecker.from_url( 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') - self.assertEqual(checker.hash_name, 'md5') + assert checker.hash_name == 'md5' def test_report(self): checker = setuptools.package_index.HashChecker.from_url( 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') rep = checker.report(lambda x: x, 'My message about %s') - self.assertEqual(rep, 'My message about md5') + assert rep == 'My message about md5' diff --git a/awx/lib/site-packages/setuptools/tests/test_sandbox.py b/awx/lib/site-packages/setuptools/tests/test_sandbox.py index 3dad137683..6e1e9e1cd2 100644 --- 
a/awx/lib/site-packages/setuptools/tests/test_sandbox.py +++ b/awx/lib/site-packages/setuptools/tests/test_sandbox.py @@ -1,69 +1,43 @@ """develop tests """ -import sys import os -import shutil -import unittest -import tempfile import types +import pytest + import pkg_resources import setuptools.sandbox -from setuptools.sandbox import DirectorySandbox, SandboxViolation +from setuptools.sandbox import DirectorySandbox -def has_win32com(): - """ - Run this to determine if the local machine has win32com, and if it - does, include additional tests. - """ - if not sys.platform.startswith('win32'): - return False - try: - mod = __import__('win32com') - except ImportError: - return False - return True -class TestSandbox(unittest.TestCase): +class TestSandbox: - def setUp(self): - self.dir = tempfile.mkdtemp() - - def tearDown(self): - shutil.rmtree(self.dir) - - def test_devnull(self): - if sys.version < '2.4': - return - sandbox = DirectorySandbox(self.dir) + def test_devnull(self, tmpdir): + sandbox = DirectorySandbox(str(tmpdir)) sandbox.run(self._file_writer(os.devnull)) + @staticmethod def _file_writer(path): def do_write(): - f = open(path, 'w') - f.write('xxx') - f.close() + with open(path, 'w') as f: + f.write('xxx') return do_write - _file_writer = staticmethod(_file_writer) - - if has_win32com(): - def test_win32com(self): - """ - win32com should not be prevented from caching COM interfaces - in gen_py. - """ - import win32com - gen_py = win32com.__gen_path__ - target = os.path.join(gen_py, 'test_write') - sandbox = DirectorySandbox(self.dir) - try: - try: - sandbox.run(self._file_writer(target)) - except SandboxViolation: - self.fail("Could not create gen_py file due to SandboxViolation") - finally: - if os.path.exists(target): os.remove(target) + def test_win32com(self, tmpdir): + """ + win32com should not be prevented from caching COM interfaces + in gen_py. 
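+        (win32com generates wrapper modules on demand under
+        win32com.__gen_path__, so sandboxed setup scripts need write
+        access to that directory.)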
+        """
+        win32com = pytest.importorskip('win32com')
+        gen_py = win32com.__gen_path__
+        target = os.path.join(gen_py, 'test_write')
+        sandbox = DirectorySandbox(str(tmpdir))
+        try:
+            # attempt to create gen_py file
+            sandbox.run(self._file_writer(target))
+        finally:
+            if os.path.exists(target):
+                os.remove(target)

     def test_setup_py_with_BOM(self):
         """
@@ -72,8 +46,57 @@ class TestSandbox(unittest.TestCase):
         target = pkg_resources.resource_filename(__name__,
             'script-with-bom.py')
         namespace = types.ModuleType('namespace')
-        setuptools.sandbox.execfile(target, vars(namespace))
+        setuptools.sandbox._execfile(target, vars(namespace))
         assert namespace.result == 'passed'

-if __name__ == '__main__':
-    unittest.main()
+    def test_setup_py_with_CRLF(self, tmpdir):
+        setup_py = tmpdir / 'setup.py'
+        with setup_py.open('wb') as stream:
+            stream.write(b'"degenerate script"\r\n')
+        setuptools.sandbox._execfile(str(setup_py), globals())
+
+
+class TestExceptionSaver:
+    def test_exception_trapped(self):
+        with setuptools.sandbox.ExceptionSaver():
+            raise ValueError("details")
+
+    def test_exception_resumed(self):
+        with setuptools.sandbox.ExceptionSaver() as saved_exc:
+            raise ValueError("details")
+
+        with pytest.raises(ValueError) as caught:
+            saved_exc.resume()
+
+        assert isinstance(caught.value, ValueError)
+        assert str(caught.value) == 'details'
+
+    def test_exception_reconstructed(self):
+        orig_exc = ValueError("details")
+
+        with setuptools.sandbox.ExceptionSaver() as saved_exc:
+            raise orig_exc
+
+        with pytest.raises(ValueError) as caught:
+            saved_exc.resume()
+
+        assert isinstance(caught.value, ValueError)
+        assert caught.value is not orig_exc
+
+    def test_no_exception_passes_quietly(self):
+        with setuptools.sandbox.ExceptionSaver() as saved_exc:
+            pass
+
+        saved_exc.resume()
+
+    def test_unpickleable_exception(self):
+        class CantPickleThis(Exception):
+            "This Exception is unpickleable because it's not in globals"
+
+        with setuptools.sandbox.ExceptionSaver() as saved_exc:
+            raise CantPickleThis('detail')
+
+        with pytest.raises(setuptools.sandbox.UnpickleableException) as caught:
+            saved_exc.resume()
+
+        assert str(caught.value) == "CantPickleThis('detail',)"
diff --git a/awx/lib/site-packages/setuptools/tests/test_sdist.py b/awx/lib/site-packages/setuptools/tests/test_sdist.py
index 71d10757bc..9013b505db 100644
--- a/awx/lib/site-packages/setuptools/tests/test_sdist.py
+++ b/awx/lib/site-packages/setuptools/tests/test_sdist.py
@@ -6,18 +6,16 @@ import os
 import shutil
 import sys
 import tempfile
-import unittest
 import unicodedata
-import re

-from setuptools.tests import environment, test_svn
-from setuptools.tests.py26compat import skipIf
+import contextlib

-from setuptools.compat import StringIO, unicode
-from setuptools.tests.py26compat import skipIf
-from setuptools.command.sdist import sdist, walk_revctrl
+import pytest
+
+import pkg_resources
+from setuptools.compat import StringIO, unicode, PY3, PY2
+from setuptools.command.sdist import sdist
 from setuptools.command.egg_info import manifest_maker
 from setuptools.dist import Distribution
-from setuptools import svn_utils

 SETUP_ATTRS = {
     'name': 'sdist_test',
@@ -34,32 +32,33 @@
 setup(**%r)
 """ % SETUP_ATTRS

-if sys.version_info >= (3,):
+if PY3:
     LATIN1_FILENAME = 'smörbröd.py'.encode('latin-1')
 else:
     LATIN1_FILENAME = 'sm\xf6rbr\xf6d.py'


 # Cannot use context manager because of Python 2.4
+@contextlib.contextmanager
 def quiet():
-    global old_stdout, old_stderr
     old_stdout, old_stderr = sys.stdout, sys.stderr
     sys.stdout, sys.stderr = StringIO(), StringIO()
-
-def unquiet():
-    sys.stdout, sys.stderr = old_stdout, old_stderr
+    try:
+        yield
+    finally:
+        sys.stdout, sys.stderr = old_stdout, old_stderr


 # Fake byte literals for Python <= 2.5
 def b(s, encoding='utf-8'):
-    if sys.version_info >= (3,):
+    if PY3:
         return s.encode(encoding)
     return s


 # Convert to POSIX path
 def posix(path):
-    if sys.version_info >= (3,) and not isinstance(path, str):
+    if PY3 and not isinstance(path, str):
         return path.replace(os.sep.encode('ascii'), b('/'))
     else:
         return path.replace(os.sep, '/')
@@ -74,17 +73,18 @@ def decompose(path):
         path = unicodedata.normalize('NFD', path)
         path = path.encode('utf-8')
     except UnicodeError:
-        pass # Not UTF-8
+        pass  # Not UTF-8
     return path


-class TestSdistTest(unittest.TestCase):
+class TestSdistTest:

-    def setUp(self):
+    def setup_method(self, method):
         self.temp_dir = tempfile.mkdtemp()
         f = open(os.path.join(self.temp_dir, 'setup.py'), 'w')
         f.write(SETUP_PY)
         f.close()
+
         # Set up the rest of the test package
         test_pkg = os.path.join(self.temp_dir, 'sdist_test')
         os.mkdir(test_pkg)
@@ -97,7 +97,7 @@ class TestSdistTest(unittest.TestCase):
         self.old_cwd = os.getcwd()
         os.chdir(self.temp_dir)

-    def tearDown(self):
+    def teardown_method(self, method):
         os.chdir(self.old_cwd)
         shutil.rmtree(self.temp_dir)

@@ -112,17 +112,40 @@ class TestSdistTest(unittest.TestCase):
         cmd = sdist(dist)
         cmd.ensure_finalized()

-        # squelch output
-        quiet()
-        try:
+        with quiet():
             cmd.run()
-        finally:
-            unquiet()

         manifest = cmd.filelist.files
-        self.assertTrue(os.path.join('sdist_test', 'a.txt') in manifest)
-        self.assertTrue(os.path.join('sdist_test', 'b.txt') in manifest)
-        self.assertTrue(os.path.join('sdist_test', 'c.rst') not in manifest)
+        assert os.path.join('sdist_test', 'a.txt') in manifest
+        assert os.path.join('sdist_test', 'b.txt') in manifest
+        assert os.path.join('sdist_test', 'c.rst') not in manifest
+
+
+    def test_defaults_case_sensitivity(self):
+        """
+        Make sure default files (README.*, etc.) are added in a case-sensitive
+        way to avoid problems with packages built on Windows.
+        """
+
+        open(os.path.join(self.temp_dir, 'readme.rst'), 'w').close()
+        open(os.path.join(self.temp_dir, 'SETUP.cfg'), 'w').close()
+
+        dist = Distribution(SETUP_ATTRS)
+        # the extension deliberately capitalized for this test
+        # to make sure the actual filename (not capitalized) gets added
+        # to the manifest
+        dist.script_name = 'setup.PY'
+        cmd = sdist(dist)
+        cmd.ensure_finalized()
+
+        with quiet():
+            cmd.run()
+
+        # lowercase all names so we can test in a case-insensitive way
+        # to make sure the files are not included
+        manifest = [name.lower() for name in cmd.filelist.files]
+        assert 'readme.rst' not in manifest, manifest
+        assert 'setup.py' not in manifest, manifest
+        assert 'setup.cfg' not in manifest, manifest

     def test_manifest_is_written_with_utf8_encoding(self):
         # Test for #303.
@@ -135,34 +158,31 @@ class TestSdistTest(unittest.TestCase):
         # UTF-8 filename
         filename = os.path.join('sdist_test', 'smörbröd.py')

+        # Must create the file or it will get stripped.
+ open(filename, 'w').close() + # Add UTF-8 filename and write manifest - quiet() - try: + with quiet(): mm.run() - mm.filelist.files.append(filename) + mm.filelist.append(filename) mm.write_manifest() - finally: - unquiet() manifest = open(mm.manifest, 'rbU') contents = manifest.read() manifest.close() # The manifest should be UTF-8 encoded - try: - u_contents = contents.decode('UTF-8') - except UnicodeDecodeError: - e = sys.exc_info()[1] - self.fail(e) + u_contents = contents.decode('UTF-8') # The manifest should contain the UTF-8 filename - if sys.version_info >= (3,): - self.assertTrue(posix(filename) in u_contents) - else: - self.assertTrue(posix(filename) in contents) + if PY2: + fs_enc = sys.getfilesystemencoding() + filename = filename.decode(fs_enc) + + assert posix(filename) in u_contents # Python 3 only - if sys.version_info >= (3,): + if PY3: def test_write_manifest_allows_utf8_filenames(self): # Test for #303. @@ -175,36 +195,37 @@ class TestSdistTest(unittest.TestCase): # UTF-8 filename filename = os.path.join(b('sdist_test'), b('smörbröd.py')) + # Must touch the file or risk removal + open(filename, "w").close() + # Add filename and write manifest - quiet() - try: + with quiet(): mm.run() u_filename = filename.decode('utf-8') mm.filelist.files.append(u_filename) # Re-write manifest mm.write_manifest() - finally: - unquiet() manifest = open(mm.manifest, 'rbU') contents = manifest.read() manifest.close() # The manifest should be UTF-8 encoded - try: - contents.decode('UTF-8') - except UnicodeDecodeError: - e = sys.exc_info()[1] - self.fail(e) + contents.decode('UTF-8') # The manifest should contain the UTF-8 filename - self.assertTrue(posix(filename) in contents) + assert posix(filename) in contents # The filelist should have been updated as well - self.assertTrue(u_filename in mm.filelist.files) + assert u_filename in mm.filelist.files def test_write_manifest_skips_non_utf8_filenames(self): - # Test for #303. + """ + Files that cannot be encoded to UTF-8 (specifically, those that + weren't originally successfully decoded and have surrogate + escapes) should be omitted from the manifest. + See https://bitbucket.org/tarek/distribute/issue/303 for history. + """ dist = Distribution(SETUP_ATTRS) dist.script_name = 'setup.py' mm = manifest_maker(dist) @@ -215,32 +236,25 @@ class TestSdistTest(unittest.TestCase): filename = os.path.join(b('sdist_test'), LATIN1_FILENAME) # Add filename with surrogates and write manifest - quiet() - try: + with quiet(): mm.run() u_filename = filename.decode('utf-8', 'surrogateescape') - mm.filelist.files.append(u_filename) + mm.filelist.append(u_filename) # Re-write manifest mm.write_manifest() - finally: - unquiet() manifest = open(mm.manifest, 'rbU') contents = manifest.read() manifest.close() # The manifest should be UTF-8 encoded - try: - contents.decode('UTF-8') - except UnicodeDecodeError: - e = sys.exc_info()[1] - self.fail(e) + contents.decode('UTF-8') # The Latin-1 filename should have been skipped - self.assertFalse(posix(filename) in contents) + assert posix(filename) not in contents # The filelist should have been updated as well - self.assertFalse(u_filename in mm.filelist.files) + assert u_filename not in mm.filelist.files def test_manifest_is_read_with_utf8_encoding(self): # Test for #303. 
@@ -250,17 +264,14 @@ class TestSdistTest(unittest.TestCase): cmd.ensure_finalized() # Create manifest - quiet() - try: + with quiet(): cmd.run() - finally: - unquiet() # Add UTF-8 filename to manifest filename = os.path.join(b('sdist_test'), b('smörbröd.py')) cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') manifest = open(cmd.manifest, 'ab') - manifest.write(b('\n')+filename) + manifest.write(b('\n') + filename) manifest.close() # The file must exist to be included in the filelist @@ -268,19 +279,16 @@ class TestSdistTest(unittest.TestCase): # Re-read manifest cmd.filelist.files = [] - quiet() - try: + with quiet(): cmd.read_manifest() - finally: - unquiet() # The filelist should contain the UTF-8 filename - if sys.version_info >= (3,): + if PY3: filename = filename.decode('utf-8') - self.assertTrue(filename in cmd.filelist.files) + assert filename in cmd.filelist.files # Python 3 only - if sys.version_info >= (3,): + if PY3: def test_read_manifest_skips_non_utf8_filenames(self): # Test for #303. @@ -290,17 +298,14 @@ class TestSdistTest(unittest.TestCase): cmd.ensure_finalized() # Create manifest - quiet() - try: + with quiet(): cmd.run() - finally: - unquiet() # Add Latin-1 filename to manifest filename = os.path.join(b('sdist_test'), LATIN1_FILENAME) cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') manifest = open(cmd.manifest, 'ab') - manifest.write(b('\n')+filename) + manifest.write(b('\n') + filename) manifest.close() # The file must exist to be included in the filelist @@ -308,22 +313,16 @@ class TestSdistTest(unittest.TestCase): # Re-read manifest cmd.filelist.files = [] - quiet() - try: - try: - cmd.read_manifest() - except UnicodeDecodeError: - e = sys.exc_info()[1] - self.fail(e) - finally: - unquiet() + with quiet(): + cmd.read_manifest() # The Latin-1 filename should have been skipped filename = filename.decode('latin-1') - self.assertFalse(filename in cmd.filelist.files) + assert filename not in cmd.filelist.files - @skipIf(sys.version_info >= (3,) and locale.getpreferredencoding() != 'UTF-8', - 'Unittest fails if locale is not utf-8 but the manifests is recorded correctly') + @pytest.mark.skipif(PY3 and locale.getpreferredencoding() != 'UTF-8', + reason='Unittest fails if locale is not utf-8 but the manifests is ' + 'recorded correctly') def test_sdist_with_utf8_encoded_filename(self): # Test for #303. dist = Distribution(SETUP_ATTRS) @@ -335,31 +334,28 @@ class TestSdistTest(unittest.TestCase): filename = os.path.join(b('sdist_test'), b('smörbröd.py')) open(filename, 'w').close() - quiet() - try: + with quiet(): cmd.run() - finally: - unquiet() if sys.platform == 'darwin': filename = decompose(filename) - if sys.version_info >= (3,): + if PY3: fs_enc = sys.getfilesystemencoding() if sys.platform == 'win32': if fs_enc == 'cp1252': # Python 3 mangles the UTF-8 filename filename = filename.decode('cp1252') - self.assertTrue(filename in cmd.filelist.files) + assert filename in cmd.filelist.files else: filename = filename.decode('mbcs') - self.assertTrue(filename in cmd.filelist.files) + assert filename in cmd.filelist.files else: filename = filename.decode('utf-8') - self.assertTrue(filename in cmd.filelist.files) + assert filename in cmd.filelist.files else: - self.assertTrue(filename in cmd.filelist.files) + assert filename in cmd.filelist.files def test_sdist_with_latin1_encoded_filename(self): # Test for #303. 
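The hunks above and below all revolve around one Python 3 behavior: a filename that is not valid UTF-8 decodes via the 'surrogateescape' error handler into lone surrogates, which cannot be re-encoded as strict UTF-8, so write_manifest omits the entry. A minimal standalone sketch of that behavior, as an editorial aside (plain Python 3, not part of the patch; the byte string mirrors LATIN1_FILENAME above):

    # Editorial sketch (not part of the patch): the surrogateescape behavior
    # these manifest tests rely on, under Python 3.
    latin1_name = b'sm\xf6rbr\xf6d.py'                  # Latin-1 bytes, invalid as UTF-8
    u_name = latin1_name.decode('utf-8', 'surrogateescape')
    print(ascii(u_name))                                # 'sm\udcf6rbr\udcf6d.py'
    try:
        u_name.encode('utf-8')                          # strict re-encode fails,
    except UnicodeEncodeError:
        print('name omitted from the manifest')         # so write_manifest drops it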
@@ -371,16 +367,13 @@ class TestSdistTest(unittest.TestCase):
         # Latin-1 filename
         filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)
         open(filename, 'w').close()
-        self.assertTrue(os.path.isfile(filename))
+        assert os.path.isfile(filename)

-        quiet()
-        try:
+        with quiet():
             cmd.run()
-        finally:
-            unquiet()

-        if sys.version_info >= (3,):
-            #not all windows systems have a default FS encoding of cp1252
+        if PY3:
+            # not all windows systems have a default FS encoding of cp1252
             if sys.platform == 'win32':
                 # Latin-1 is similar to Windows-1252 however
                 # on mbcs filesys it is not in latin-1 encoding
@@ -390,146 +383,37 @@ class TestSdistTest(unittest.TestCase):
                 else:
                     filename = filename.decode('latin-1')

-                self.assertTrue(filename in cmd.filelist.files)
+                assert filename in cmd.filelist.files
             else:
                 # The Latin-1 filename should have been skipped
                 filename = filename.decode('latin-1')
-                self.assertFalse(filename in cmd.filelist.files)
+                assert filename not in cmd.filelist.files
         else:
-            # No conversion takes place under Python 2 and the file
-            # is included. We shall keep it that way for BBB.
-            self.assertTrue(filename in cmd.filelist.files)
+            # Under Python 2 there seems to be no decoded string in the
+            # filelist. However, due to the decoding and re-encoding of the
+            # file name while producing the UTF-8 manifest, the Latin-1 name
+            # may be excluded.
+            try:
+                # fs_enc should match how one expects the decoding to be
+                # performed for the manifest output.
+                fs_enc = sys.getfilesystemencoding()
+                filename.decode(fs_enc)
+                assert filename in cmd.filelist.files
+            except UnicodeDecodeError:
+                assert filename not in cmd.filelist.files


-class TestDummyOutput(environment.ZippedEnvironment):
+def test_default_revctrl():
+    """
+    When _default_revctrl was removed from the `setuptools.command.sdist`
+    module in 10.0, it broke some systems which keep an old install of
+    setuptools (Distribute) around. Those old versions require that the
+    setuptools package continue to implement that interface, so this
+    function provides that interface, stubbed. See #320 for details.
- def setUp(self): - self.datafile = os.path.join('setuptools', 'tests', - 'svn_data', "dummy.zip") - self.dataname = "dummy" - super(TestDummyOutput, self).setUp() - - def _run(self): - code, data = environment.run_setup_py(["sdist"], - pypath=self.old_cwd, - data_stream=0) - if code: - info = "DIR: " + os.path.abspath('.') - info += "\n SDIST RETURNED: %i\n\n" % code - info += data - raise AssertionError(info) - - datalines = data.splitlines() - - possible = ( - "running sdist", - "running egg_info", - "creating dummy\.egg-info", - "writing dummy\.egg-info", - "writing top-level names to dummy\.egg-info", - "writing dependency_links to dummy\.egg-info", - "writing manifest file 'dummy\.egg-info", - "reading manifest file 'dummy\.egg-info", - "reading manifest template 'MANIFEST\.in'", - "writing manifest file 'dummy\.egg-info", - "creating dummy-0.1.1", - "making hard links in dummy-0\.1\.1", - "copying files to dummy-0\.1\.1", - "copying \S+ -> dummy-0\.1\.1", - "copying dummy", - "copying dummy\.egg-info", - "hard linking \S+ -> dummy-0\.1\.1", - "hard linking dummy", - "hard linking dummy\.egg-info", - "Writing dummy-0\.1\.1", - "creating dist", - "creating 'dist", - "Creating tar archive", - "running check", - "adding 'dummy-0\.1\.1", - "tar .+ dist/dummy-0\.1\.1\.tar dummy-0\.1\.1", - "gzip .+ dist/dummy-0\.1\.1\.tar", - "removing 'dummy-0\.1\.1' \\(and everything under it\\)", - ) - - print(" DIR: " + os.path.abspath('.')) - for line in datalines: - found = False - for pattern in possible: - if re.match(pattern, line): - print(" READ: " + line) - found = True - break - if not found: - raise AssertionError("Unexpexected: %s\n-in-\n%s" - % (line, data)) - - return data - - def test_sources(self): - self._run() - - -class TestSvn(environment.ZippedEnvironment): - - def setUp(self): - version = svn_utils.SvnInfo.get_svn_version() - if not version: # None or Empty - return - - self.base_version = tuple([int(x) for x in version.split('.')][:2]) - - if not self.base_version: - raise ValueError('No SVN tools installed') - elif self.base_version < (1, 3): - raise ValueError('Insufficient SVN Version %s' % version) - elif self.base_version >= (1, 9): - #trying the latest version - self.base_version = (1, 8) - - self.dataname = "svn%i%i_example" % self.base_version - self.datafile = os.path.join('setuptools', 'tests', - 'svn_data', self.dataname + ".zip") - super(TestSvn, self).setUp() - - @skipIf(not test_svn._svn_check, "No SVN to text, in the first place") - def test_walksvn(self): - if self.base_version >= (1, 6): - folder2 = 'third party2' - folder3 = 'third party3' - else: - folder2 = 'third_party2' - folder3 = 'third_party3' - - #TODO is this right - expected = set([ - os.path.join('a file'), - os.path.join(folder2, 'Changes.txt'), - os.path.join(folder2, 'MD5SUMS'), - os.path.join(folder2, 'README.txt'), - os.path.join(folder3, 'Changes.txt'), - os.path.join(folder3, 'MD5SUMS'), - os.path.join(folder3, 'README.txt'), - os.path.join(folder3, 'TODO.txt'), - os.path.join(folder3, 'fin'), - os.path.join('third_party', 'README.txt'), - os.path.join('folder', folder2, 'Changes.txt'), - os.path.join('folder', folder2, 'MD5SUMS'), - os.path.join('folder', folder2, 'WatashiNiYomimasu.txt'), - os.path.join('folder', folder3, 'Changes.txt'), - os.path.join('folder', folder3, 'fin'), - os.path.join('folder', folder3, 'MD5SUMS'), - os.path.join('folder', folder3, 'oops'), - os.path.join('folder', folder3, 'WatashiNiYomimasu.txt'), - os.path.join('folder', folder3, 'ZuMachen.txt'), - 
os.path.join('folder', 'third_party', 'WatashiNiYomimasu.txt'), - os.path.join('folder', 'lalala.txt'), - os.path.join('folder', 'quest.txt'), - # The example will have a deleted file - # (or should) but shouldn't return it - ]) - self.assertEqual(set(x for x in walk_revctrl()), expected) - - -def test_suite(): - return unittest.defaultTestLoader.loadTestsFromName(__name__) + This interface must be maintained until Ubuntu 12.04 is no longer + supported (by Setuptools). + """ + ep_def = 'svn_cvs = setuptools.command.sdist:_default_revctrl' + ep = pkg_resources.EntryPoint.parse(ep_def) + res = ep.resolve() + assert hasattr(res, '__iter__') diff --git a/awx/lib/site-packages/setuptools/tests/test_test.py b/awx/lib/site-packages/setuptools/tests/test_test.py index 7a06a40329..a66294c951 100644 --- a/awx/lib/site-packages/setuptools/tests/test_test.py +++ b/awx/lib/site-packages/setuptools/tests/test_test.py @@ -1,124 +1,91 @@ # -*- coding: UTF-8 -*- -"""develop tests -""" -import sys -import os, shutil, tempfile, unittest -import tempfile +from __future__ import unicode_literals + +import os import site -from distutils.errors import DistutilsError -from setuptools.compat import StringIO +import pytest + from setuptools.command.test import test -from setuptools.command import easy_install as easy_install_pkg from setuptools.dist import Distribution -SETUP_PY = """\ -from setuptools import setup +from .textwrap import DALS +from . import contexts -setup(name='foo', - packages=['name', 'name.space', 'name.space.tests'], - namespace_packages=['name'], - test_suite='name.space.tests.test_suite', -) -""" +SETUP_PY = DALS(""" + from setuptools import setup -NS_INIT = """# -*- coding: Latin-1 -*- -# Söme Arbiträry Ünicode to test Issüé 310 -try: - __import__('pkg_resources').declare_namespace(__name__) -except ImportError: - from pkgutil import extend_path - __path__ = extend_path(__path__, __name__) -""" -# Make sure this is Latin-1 binary, before writing: -if sys.version_info < (3,): - NS_INIT = NS_INIT.decode('UTF-8') -NS_INIT = NS_INIT.encode('Latin-1') + setup(name='foo', + packages=['name', 'name.space', 'name.space.tests'], + namespace_packages=['name'], + test_suite='name.space.tests.test_suite', + ) + """) -TEST_PY = """import unittest +NS_INIT = DALS(""" + # -*- coding: Latin-1 -*- + # Söme Arbiträry Ünicode to test Distribute Issüé 310 + try: + __import__('pkg_resources').declare_namespace(__name__) + except ImportError: + from pkgutil import extend_path + __path__ = extend_path(__path__, __name__) + """) -class TestTest(unittest.TestCase): - def test_test(self): - print "Foo" # Should fail under Python 3 unless 2to3 is used +TEST_PY = DALS(""" + import unittest -test_suite = unittest.makeSuite(TestTest) -""" + class TestTest(unittest.TestCase): + def test_test(self): + print "Foo" # Should fail under Python 3 unless 2to3 is used -class TestTestTest(unittest.TestCase): + test_suite = unittest.makeSuite(TestTest) + """) - def setUp(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix'): - return - # Directory structure - self.dir = tempfile.mkdtemp() - os.mkdir(os.path.join(self.dir, 'name')) - os.mkdir(os.path.join(self.dir, 'name', 'space')) - os.mkdir(os.path.join(self.dir, 'name', 'space', 'tests')) - # setup.py - setup = os.path.join(self.dir, 'setup.py') - f = open(setup, 'wt') +@pytest.fixture +def sample_test(tmpdir_cwd): + os.makedirs('name/space/tests') + + # setup.py + with open('setup.py', 'wt') as f: f.write(SETUP_PY) - f.close() - self.old_cwd = os.getcwd() - # 
name/__init__.py - init = os.path.join(self.dir, 'name', '__init__.py') - f = open(init, 'wb') - f.write(NS_INIT) - f.close() - # name/space/__init__.py - init = os.path.join(self.dir, 'name', 'space', '__init__.py') - f = open(init, 'wt') + + # name/__init__.py + with open('name/__init__.py', 'wb') as f: + f.write(NS_INIT.encode('Latin-1')) + + # name/space/__init__.py + with open('name/space/__init__.py', 'wt') as f: f.write('#empty\n') - f.close() - # name/space/tests/__init__.py - init = os.path.join(self.dir, 'name', 'space', 'tests', '__init__.py') - f = open(init, 'wt') + + # name/space/tests/__init__.py + with open('name/space/tests/__init__.py', 'wt') as f: f.write(TEST_PY) - f.close() - os.chdir(self.dir) - self.old_base = site.USER_BASE - site.USER_BASE = tempfile.mkdtemp() - self.old_site = site.USER_SITE - site.USER_SITE = tempfile.mkdtemp() - def tearDown(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix'): - return - - os.chdir(self.old_cwd) - shutil.rmtree(self.dir) - shutil.rmtree(site.USER_BASE) - shutil.rmtree(site.USER_SITE) - site.USER_BASE = self.old_base - site.USER_SITE = self.old_site +@pytest.mark.skipif('hasattr(sys, "real_prefix")') +@pytest.mark.usefixtures('user_override') +@pytest.mark.usefixtures('sample_test') +class TestTestTest: def test_test(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix'): - return - - dist = Distribution(dict( + params = dict( name='foo', packages=['name', 'name.space', 'name.space.tests'], namespace_packages=['name'], test_suite='name.space.tests.test_suite', use_2to3=True, - )) + ) + dist = Distribution(params) dist.script_name = 'setup.py' cmd = test(dist) cmd.user = 1 cmd.ensure_finalized() cmd.install_dir = site.USER_SITE cmd.user = 1 - old_stdout = sys.stdout - sys.stdout = StringIO() - try: - try: # try/except/finally doesn't work in Python 2.4, so we need nested try-statements. + with contexts.quiet(): + # The test runner calls sys.exit + with contexts.suppress_exceptions(SystemExit): cmd.run() - except SystemExit: # The test runner calls sys.exit, stop that making an error. - pass - finally: - sys.stdout = old_stdout - diff --git a/awx/lib/site-packages/setuptools/tests/test_upload_docs.py b/awx/lib/site-packages/setuptools/tests/test_upload_docs.py index 769f16cc5a..cc71cadb23 100644 --- a/awx/lib/site-packages/setuptools/tests/test_upload_docs.py +++ b/awx/lib/site-packages/setuptools/tests/test_upload_docs.py @@ -1,72 +1,59 @@ -"""build_ext tests -""" -import sys, os, shutil, tempfile, unittest, site, zipfile +import os +import zipfile +import contextlib + +import pytest + from setuptools.command.upload_docs import upload_docs from setuptools.dist import Distribution -SETUP_PY = """\ -from setuptools import setup +from .textwrap import DALS +from . import contexts -setup(name='foo') -""" -class TestUploadDocsTest(unittest.TestCase): - def setUp(self): - self.dir = tempfile.mkdtemp() - setup = os.path.join(self.dir, 'setup.py') - f = open(setup, 'w') +SETUP_PY = DALS( + """ + from setuptools import setup + + setup(name='foo') + """) + + +@pytest.fixture +def sample_project(tmpdir_cwd): + # setup.py + with open('setup.py', 'wt') as f: f.write(SETUP_PY) - f.close() - self.old_cwd = os.getcwd() - os.chdir(self.dir) - self.upload_dir = os.path.join(self.dir, 'build') - os.mkdir(self.upload_dir) + os.mkdir('build') - # A test document. - f = open(os.path.join(self.upload_dir, 'index.html'), 'w') + # A test document. 
+    with open('build/index.html', 'w') as f:
         f.write("Hello world.")
-        f.close()

-        # An empty folder.
-        os.mkdir(os.path.join(self.upload_dir, 'empty'))
+    # An empty folder.
+    os.mkdir('build/empty')

-        if sys.version >= "2.6":
-            self.old_base = site.USER_BASE
-            site.USER_BASE = upload_docs.USER_BASE = tempfile.mkdtemp()
-            self.old_site = site.USER_SITE
-            site.USER_SITE = upload_docs.USER_SITE = tempfile.mkdtemp()

-    def tearDown(self):
-        os.chdir(self.old_cwd)
-        shutil.rmtree(self.dir)
-        if sys.version >= "2.6":
-            shutil.rmtree(site.USER_BASE)
-            shutil.rmtree(site.USER_SITE)
-            site.USER_BASE = self.old_base
-            site.USER_SITE = self.old_site

+@pytest.mark.usefixtures('sample_project')
+@pytest.mark.usefixtures('user_override')
+class TestUploadDocsTest:
     def test_create_zipfile(self):
-        # Test to make sure zipfile creation handles common cases.
-        # This explicitly includes a folder containing an empty folder.
+        """
+        Ensure zipfile creation handles common cases, including a folder
+        containing an empty folder.
+        """
         dist = Distribution()

         cmd = upload_docs(dist)
-        cmd.upload_dir = self.upload_dir
-        cmd.target_dir = self.upload_dir
-        tmp_dir = tempfile.mkdtemp()
-        tmp_file = os.path.join(tmp_dir, 'foo.zip')
-        try:
+        cmd.target_dir = cmd.upload_dir = 'build'
+        with contexts.tempdir() as tmp_dir:
+            tmp_file = os.path.join(tmp_dir, 'foo.zip')
             zip_file = cmd.create_zipfile(tmp_file)

             assert zipfile.is_zipfile(tmp_file)

-            zip_file = zipfile.ZipFile(tmp_file) # woh...
-
-            assert zip_file.namelist() == ['index.html']
-
-            zip_file.close()
-        finally:
-            shutil.rmtree(tmp_dir)
-
+            with contextlib.closing(zipfile.ZipFile(tmp_file)) as zip_file:
+                assert zip_file.namelist() == ['index.html']
diff --git a/awx/lib/site-packages/setuptools/tests/test_windows_wrappers.py b/awx/lib/site-packages/setuptools/tests/test_windows_wrappers.py
new file mode 100644
index 0000000000..5b14d07b0e
--- /dev/null
+++ b/awx/lib/site-packages/setuptools/tests/test_windows_wrappers.py
@@ -0,0 +1,183 @@
+"""
+Python Script Wrapper for Windows
+=================================
+
+setuptools includes wrappers for Python scripts that allow them to be
+executed like regular Windows programs. There are two wrappers, one
+for command-line programs, cli.exe, and one for graphical programs,
+gui.exe. These programs are almost identical, function pretty much
+the same way, and are generated from the same source file. The
+wrapper programs are used by copying them to the directory containing
+the script they are to wrap, with the same name as the script they
+are to wrap.
+"""
+
+from __future__ import absolute_import
+
+import sys
+import textwrap
+import subprocess
+
+import pytest
+
+from setuptools.command.easy_install import nt_quote_arg
+import pkg_resources
+
+
+pytestmark = pytest.mark.skipif(sys.platform != 'win32', reason="Windows only")
+
+
+class WrapperTester:
+
+    @classmethod
+    def prep_script(cls, template):
+        python_exe = nt_quote_arg(sys.executable)
+        return template % locals()
+
+    @classmethod
+    def create_script(cls, tmpdir):
+        """
+        Create a simple script, foo-script.py
+
+        Note that the script starts with a Unix-style '#!' line saying which
+        Python executable to run. The wrapper will use this line to find the
+        correct Python executable.
+        """
+
+        script = cls.prep_script(cls.script_tmpl)
+
+        with (tmpdir / cls.script_name).open('w') as f:
+            f.write(script)
+
+        # also copy cli.exe to the sample directory
+        with (tmpdir / cls.wrapper_name).open('wb') as f:
+            w = pkg_resources.resource_string('setuptools', cls.wrapper_source)
+            f.write(w)
+
+
+class TestCLI(WrapperTester):
+    script_name = 'foo-script.py'
+    wrapper_source = 'cli-32.exe'
+    wrapper_name = 'foo.exe'
+    script_tmpl = textwrap.dedent("""
+        #!%(python_exe)s
+        import sys
+        input = repr(sys.stdin.read())
+        print(sys.argv[0][-14:])
+        print(sys.argv[1:])
+        print(input)
+        if __debug__:
+            print('non-optimized')
+        """).lstrip()
+
+    def test_basic(self, tmpdir):
+        """
+        When the copy of cli.exe, foo.exe in this example, runs, it examines
+        the path name it was run with and computes a Python script path name
+        by removing the '.exe' suffix and adding the '-script.py' suffix. (For
+        GUI programs, the suffix '-script.pyw' is added.) This is why we
+        named our script the way we did. Now we can run our script by running
+        the wrapper:
+
+        This example was a little pathological in that it exercised Windows
+        (MS C runtime) quoting rules:
+
+        - Strings containing spaces are surrounded by double quotes.
+
+        - Double quotes in strings need to be escaped by preceding them with
+          back slashes.
+
+        - One or more backslashes preceding double quotes need to be escaped
+          by preceding each of them with back slashes.
+        """
+        self.create_script(tmpdir)
+        cmd = [
+            str(tmpdir / 'foo.exe'),
+            'arg1',
+            'arg 2',
+            'arg "2\\"',
+            'arg 4\\',
+            'arg5 a\\\\b',
+        ]
+        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
+        stdout, stderr = proc.communicate('hello\nworld\n'.encode('ascii'))
+        actual = stdout.decode('ascii').replace('\r\n', '\n')
+        expected = textwrap.dedent(r"""
+            \foo-script.py
+            ['arg1', 'arg 2', 'arg "2\\"', 'arg 4\\', 'arg5 a\\\\b']
+            'hello\nworld\n'
+            non-optimized
+            """).lstrip()
+        assert actual == expected
+
+    def test_with_options(self, tmpdir):
+        """
+        Specifying Python Command-line Options
+        --------------------------------------
+
+        You can specify a single argument on the '#!' line. This can be used
+        to specify Python options like -O, to run in optimized mode, or -i
+        to start the interactive interpreter. You can combine multiple
+        options as usual. For example, to run in optimized mode and
+        enter the interpreter after running the script, you could use -Oi:
+        """
+        self.create_script(tmpdir)
+        tmpl = textwrap.dedent("""
+            #!%(python_exe)s -Oi
+            import sys
+            input = repr(sys.stdin.read())
+            print(sys.argv[0][-14:])
+            print(sys.argv[1:])
+            print(input)
+            if __debug__:
+                print('non-optimized')
+            sys.ps1 = '---'
+            """).lstrip()
+        with (tmpdir / 'foo-script.py').open('w') as f:
+            f.write(self.prep_script(tmpl))
+        cmd = [str(tmpdir / 'foo.exe')]
+        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
+        stdout, stderr = proc.communicate()
+        actual = stdout.decode('ascii').replace('\r\n', '\n')
+        expected = textwrap.dedent(r"""
+            \foo-script.py
+            []
+            ''
+            ---
+            """).lstrip()
+        assert actual == expected
+
+
+class TestGUI(WrapperTester):
+    """
+    Testing the GUI Version
+    -----------------------
+    """
+    script_name = 'bar-script.pyw'
+    wrapper_source = 'gui-32.exe'
+    wrapper_name = 'bar.exe'
+
+    script_tmpl = textwrap.dedent("""
+        #!%(python_exe)s
+        import sys
+        f = open(sys.argv[1], 'wb')
+        bytes_written = f.write(repr(sys.argv[2]).encode('utf-8'))
+        f.close()
+        """).strip()
+
+    def test_basic(self, tmpdir):
+        """Test the GUI version with the simple script, bar-script.pyw"""
+        self.create_script(tmpdir)
+
+        cmd = [
+            str(tmpdir / 'bar.exe'),
+            str(tmpdir / 'test_output.txt'),
+            'Test Argument',
+        ]
+        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
+        stdout, stderr = proc.communicate()
+        assert not stdout
+        assert not stderr
+        with (tmpdir / 'test_output.txt').open('rb') as f_out:
+            actual = f_out.read().decode('ascii')
+        assert actual == repr('Test Argument')
diff --git a/awx/lib/site-packages/setuptools/tests/textwrap.py b/awx/lib/site-packages/setuptools/tests/textwrap.py
new file mode 100644
index 0000000000..5cd9e5bca8
--- /dev/null
+++ b/awx/lib/site-packages/setuptools/tests/textwrap.py
@@ -0,0 +1,8 @@
+from __future__ import absolute_import
+
+import textwrap
+
+
+def DALS(s):
+    "dedent and left-strip"
+    return textwrap.dedent(s).lstrip()
diff --git a/awx/lib/site-packages/setuptools/unicode_utils.py b/awx/lib/site-packages/setuptools/unicode_utils.py
new file mode 100644
index 0000000000..d2de941a69
--- /dev/null
+++ b/awx/lib/site-packages/setuptools/unicode_utils.py
@@ -0,0 +1,41 @@
+import unicodedata
+import sys
+from setuptools.compat import unicode as decoded_string
+
+
+# HFS Plus uses decomposed UTF-8
+def decompose(path):
+    if isinstance(path, decoded_string):
+        return unicodedata.normalize('NFD', path)
+    try:
+        path = path.decode('utf-8')
+        path = unicodedata.normalize('NFD', path)
+        path = path.encode('utf-8')
+    except UnicodeError:
+        pass  # Not UTF-8
+    return path
+
+
+def filesys_decode(path):
+    """
+    Ensure that the given path is decoded,
+    NONE when no expected encoding works
+    """
+
+    fs_enc = sys.getfilesystemencoding()
+    if isinstance(path, decoded_string):
+        return path
+
+    for enc in (fs_enc, "utf-8"):
+        try:
+            return path.decode(enc)
+        except UnicodeDecodeError:
+            continue
+
+
+def try_encode(string, enc):
+    "turn unicode encoding into a functional routine"
+    try:
+        return string.encode(enc)
+    except UnicodeEncodeError:
+        return None
diff --git a/awx/lib/site-packages/setuptools/utils.py b/awx/lib/site-packages/setuptools/utils.py
new file mode 100644
index 0000000000..91e4b87f65
--- /dev/null
+++ b/awx/lib/site-packages/setuptools/utils.py
@@ -0,0 +1,11 @@
+import os
+import os.path
+
+
+def cs_path_exists(fspath):
+    if not os.path.exists(fspath):
+        return False
+    # make absolute so we always have a directory
+    abspath = os.path.abspath(fspath)
+    directory, filename = os.path.split(abspath)
+    return filename in os.listdir(directory)
\ No newline at end of file
diff --git a/awx/lib/site-packages/setuptools/version.py b/awx/lib/site-packages/setuptools/version.py
index 2b9ccf1770..8bb69c5f6d 100644
--- a/awx/lib/site-packages/setuptools/version.py
+++ b/awx/lib/site-packages/setuptools/version.py
@@ -1 +1 @@
-__version__ = '2.2'
+__version__ = '12.0.5'
diff --git a/awx/lib/site-packages/setuptools/windows_support.py b/awx/lib/site-packages/setuptools/windows_support.py
new file mode 100644
index 0000000000..cb977cff95
--- /dev/null
+++ b/awx/lib/site-packages/setuptools/windows_support.py
@@ -0,0 +1,29 @@
+import platform
+import ctypes
+
+
+def windows_only(func):
+    if platform.system() != 'Windows':
+        return lambda *args, **kwargs: None
+    return func
+
+
+@windows_only
+def hide_file(path):
+    """
+    Set the hidden attribute on a file or directory.
+
+    From http://stackoverflow.com/questions/19622133/
+
+    `path` must be text.
+    """
+    __import__('ctypes.wintypes')
+    SetFileAttributes = ctypes.windll.kernel32.SetFileAttributesW
+    SetFileAttributes.argtypes = ctypes.wintypes.LPWSTR, ctypes.wintypes.DWORD
+    SetFileAttributes.restype = ctypes.wintypes.BOOL
+
+    FILE_ATTRIBUTE_HIDDEN = 0x02
+
+    ret = SetFileAttributes(path, FILE_ATTRIBUTE_HIDDEN)
+    if not ret:
+        raise ctypes.WinError()
diff --git a/awx/lib/site-packages/six.py b/awx/lib/site-packages/six.py
index f8f7d402ff..ffa3fe166a 100644
--- a/awx/lib/site-packages/six.py
+++ b/awx/lib/site-packages/six.py
@@ -1,6 +1,6 @@
 """Utilities for writing code that runs on Python 2 and 3"""

-# Copyright (c) 2010-2014 Benjamin Peterson
+# Copyright (c) 2010-2015 Benjamin Peterson
 #
 # Permission is hereby granted, free of charge, to any person obtaining a copy
 # of this software and associated documentation files (the "Software"), to deal
@@ -20,13 +20,16 @@
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 # SOFTWARE.

+from __future__ import absolute_import
+
 import functools
+import itertools
 import operator
 import sys
 import types

 __author__ = "Benjamin Peterson <benjamin@python.org>"
-__version__ = "1.7.3"
+__version__ = "1.9.0"


 # Useful for very coarse version differentiation.
@@ -86,8 +89,12 @@ class _LazyDescr(object):
     def __get__(self, obj, tp):
         result = self._resolve()
         setattr(obj, self.name, result)  # Invokes __set__.
-        # This is a bit ugly, but it avoids running this again.
-        delattr(obj.__class__, self.name)
+        try:
+            # This is a bit ugly, but it avoids running this again by
+            # removing this descriptor.
+ delattr(obj.__class__, self.name) + except AttributeError: + pass return result @@ -225,10 +232,12 @@ _moved_attributes = [ MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), MovedAttribute("map", "itertools", "builtins", "imap", "map"), MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), MovedAttribute("reload_module", "__builtin__", "imp", "reload"), MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), MovedAttribute("StringIO", "StringIO", "io"), MovedAttribute("UserDict", "UserDict", "collections"), MovedAttribute("UserList", "UserList", "collections"), @@ -248,6 +257,7 @@ _moved_attributes = [ MovedModule("html_parser", "HTMLParser", "html.parser"), MovedModule("http_client", "httplib", "http.client"), MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), @@ -317,6 +327,13 @@ _urllib_parse_moved_attributes = [ MovedAttribute("unquote_plus", "urllib", "urllib.parse"), MovedAttribute("urlencode", "urllib", "urllib.parse"), MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), ] for attr in _urllib_parse_moved_attributes: setattr(Module_six_moves_urllib_parse, attr.name, attr) @@ -542,6 +559,12 @@ if PY3: def iterlists(d, **kw): return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = operator.methodcaller("values") + + viewitems = operator.methodcaller("items") else: def iterkeys(d, **kw): return iter(d.iterkeys(**kw)) @@ -555,6 +578,12 @@ else: def iterlists(d, **kw): return iter(d.iterlists(**kw)) + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + _add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") _add_doc(itervalues, "Return an iterator over the values of a dictionary.") _add_doc(iteritems, @@ -581,6 +610,9 @@ if PY3: import io StringIO = io.StringIO BytesIO = io.BytesIO + _assertCountEqual = "assertCountEqual" + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" else: def b(s): return s @@ -593,19 +625,35 @@ else: return ord(bs[0]) def indexbytes(buf, i): return ord(buf[i]) - def iterbytes(buf): - return (ord(byte) for byte in buf) + iterbytes = functools.partial(itertools.imap, ord) import StringIO StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" _add_doc(b, """Byte literal""") _add_doc(u, """Text literal""") +def assertCountEqual(self, *args, 
**kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + if PY3: exec_ = getattr(moves.builtins, "exec") def reraise(tp, value, tb=None): + if value is None: + value = tp() if value.__traceback__ is not tb: raise value.with_traceback(tb) raise value @@ -629,6 +677,21 @@ else: """) +if sys.version_info[:2] == (3, 2): + exec_("""def raise_from(value, from_value): + if from_value is None: + raise value + raise value from from_value +""") +elif sys.version_info[:2] > (3, 2): + exec_("""def raise_from(value, from_value): + raise value from from_value +""") +else: + def raise_from(value, from_value): + raise value + + print_ = getattr(moves.builtins, "print", None) if print_ is None: def print_(*args, **kwargs): @@ -683,13 +746,22 @@ if print_ is None: write(sep) write(arg) write(end) +if sys.version_info[:2] < (3, 3): + _print = print_ + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() _add_doc(reraise, """Reraise an exception.""") if sys.version_info[0:2] < (3, 4): - def wraps(wrapped): + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): def wrapper(f): - f = functools.wraps(wrapped)(f) + f = functools.wraps(wrapped, assigned, updated)(f) f.__wrapped__ = wrapped return f return wrapper @@ -711,17 +783,36 @@ def add_metaclass(metaclass): """Class decorator for creating a class with a metaclass.""" def wrapper(cls): orig_vars = cls.__dict__.copy() - orig_vars.pop('__dict__', None) - orig_vars.pop('__weakref__', None) slots = orig_vars.get('__slots__') if slots is not None: if isinstance(slots, str): slots = [slots] for slots_var in slots: orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) return metaclass(cls.__name__, cls.__bases__, orig_vars) return wrapper + +def python_2_unicode_compatible(klass): + """ + A decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if '__str__' not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % + klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + return klass + + # Complete the moves implementation. # This code is at the end of this module to speed up module loading. # Turn this module into a package. 
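The six 1.7.3 → 1.9.0 upgrade above introduces raise_from, the dict view wrappers, and python_2_unicode_compatible, among others. A minimal usage sketch of those helpers, as an editorial aside (not part of the patch; the class and function names here are illustrative):

    # Editorial sketch (not part of the patch): helpers added in six 1.9.0.
    import six

    @six.python_2_unicode_compatible      # no-op on Python 3
    class Greeting(object):               # illustrative class, not from the diff
        def __str__(self):
            return u'h\xe9llo'            # one text-returning __str__ serves both majors

    def parse_int(text):
        try:
            return int(text)
        except ValueError as exc:
            # chains the cause on Python 3; degrades to a bare raise on Python 2
            six.raise_from(RuntimeError('bad payload: %r' % text), exc)

    d = {'a': 1}
    assert list(six.viewkeys(d)) == ['a'] # viewkeys/viewvalues/viewitems wrappers
    print(Greeting())
    try:
        parse_int('x')
    except RuntimeError:
        pass                              # __cause__ is the ValueError on Python 3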
diff --git a/awx/main/management/commands/run_socketio_service.py b/awx/main/management/commands/run_socketio_service.py index 0788217a21..7fc892b46b 100644 --- a/awx/main/management/commands/run_socketio_service.py +++ b/awx/main/management/commands/run_socketio_service.py @@ -122,7 +122,9 @@ def notification_handler(server): 'type': 'event', } for session_id, socket in list(server.sockets.iteritems()): + print("Going to emit an event, here are our sessions: %s" % valid_sockets) if session_id in valid_sockets: + print("Sending to session: %s" % str(session_id)) socket.send_packet(packet) class Command(NoArgsCommand): diff --git a/requirements/dev_local.txt b/requirements/dev_local.txt index 315c1e3869..82968bc54d 100644 --- a/requirements/dev_local.txt +++ b/requirements/dev_local.txt @@ -2,7 +2,7 @@ # packages). Install using "pip --no-index -r dev_local.txt". # May need to install these packages separately the first time: -setuptools-2.2.tar.gz +setuptools-12.0.5.tar.gz distribute-0.7.3.zip Django-1.6.7.tar.gz diff --git a/requirements/setuptools-12.0.5.tar.gz b/requirements/setuptools-12.0.5.tar.gz new file mode 100644 index 0000000000..c70e2d7687 Binary files /dev/null and b/requirements/setuptools-12.0.5.tar.gz differ diff --git a/requirements/setuptools-2.2.tar.gz b/requirements/setuptools-2.2.tar.gz deleted file mode 100644 index 3ae2530d9e..0000000000 Binary files a/requirements/setuptools-2.2.tar.gz and /dev/null differ
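As a closing editorial sanity check for the vendored upgrades recorded in this diff, a short sketch (assuming awx/lib/site-packages precedes system packages on sys.path, as AWX arranges at runtime):

    # Editorial sketch (not part of the patch): confirm the bundled versions.
    import setuptools
    import six

    assert setuptools.__version__ == '12.0.5'  # per setuptools/version.py above
    assert six.__version__ == '1.9.0'          # per six.py above
    print('vendored setuptools and six are the expected versions')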