Purge inventory file syntax testing

* remove use of temporary files in inventory import
* remove old inventory import tests
* remove supporting example inventory files

This type of testing has been migrated to the backport
repository because it will be handled by the inventory module.
From here on out, no inventory syntax testing will be
maintained inside the Ansible Tower repo.
This commit is contained in:
AlanCoding 2017-04-26 10:38:51 -04:00
parent 2488dabca6
commit ef01fea89c
5 changed files with 135 additions and 1517 deletions

View File

@ -1,890 +0,0 @@
#!/usr/bin/env python
# Python
import json
import optparse
inv_list = {
"ansible1.axialmarket.com": [
"ec2-54-226-227-106.compute-1.amazonaws.com"
],
"ansible2.axialmarket.com": [
"ec2-54-227-113-75.compute-1.amazonaws.com"
],
"app1new.axialmarket.com": [
"ec2-54-235-143-131.compute-1.amazonaws.com"
],
"app2new.axialmarket.com": [
"ec2-54-235-143-132.compute-1.amazonaws.com"
],
"app2t.axialmarket.com": [
"ec2-23-23-168-208.compute-1.amazonaws.com"
],
"app2t.dev.axialmarket.com": [
"ec2-23-23-168-208.compute-1.amazonaws.com"
],
"awx.axialmarket.com": [
"ec2-54-211-252-32.compute-1.amazonaws.com"
],
"axtdev2.axialmarket.com": [
"ec2-54-234-3-7.compute-1.amazonaws.com"
],
"backup1.axialmarket.com": [
"ec2-23-23-170-30.compute-1.amazonaws.com"
],
"bah.axialmarket.com": [
"ec2-107-20-176-139.compute-1.amazonaws.com"
],
"bennew.axialmarket.com": [
"ec2-54-243-146-75.compute-1.amazonaws.com"
],
"build0.axialmarket.com": [
"ec2-54-226-244-191.compute-1.amazonaws.com"
],
"cburke0.axialmarket.com": [
"ec2-54-226-100-117.compute-1.amazonaws.com"
],
"dabnew.axialmarket.com": [
"ec2-107-22-248-113.compute-1.amazonaws.com"
],
"dannew.axialmarket.com": [
"ec2-107-22-247-88.compute-1.amazonaws.com"
],
"de1-intenv.axialmarket.com": [
"ec2-54-224-92-80.compute-1.amazonaws.com"
],
"dev11-20120311": [
"dev11-20120311.co735munpzcw.us-east-1.rds.amazonaws.com"
],
"dev11-20130828": [
"dev11-20130828.co735munpzcw.us-east-1.rds.amazonaws.com"
],
"dev11-20130903-dab": [
"dev11-20130903-dab.co735munpzcw.us-east-1.rds.amazonaws.com"
],
"firecrow.axialmarket.com": [
"ec2-54-227-30-105.compute-1.amazonaws.com"
],
"herby0.axialmarket.com": [
"ec2-174-129-140-30.compute-1.amazonaws.com"
],
"i-02966c7a": [
"ec2-23-21-57-109.compute-1.amazonaws.com"
],
"i-0485b47c": [
"ec2-23-23-168-208.compute-1.amazonaws.com"
],
"i-0805a578": [
"ec2-107-22-234-22.compute-1.amazonaws.com"
],
"i-0a1e4777": [
"ec2-75-101-129-169.compute-1.amazonaws.com"
],
"i-0e05a57e": [
"ec2-107-22-234-180.compute-1.amazonaws.com"
],
"i-116f5861": [
"ec2-54-235-143-162.compute-1.amazonaws.com"
],
"i-197edf79": [
"ec2-54-226-244-191.compute-1.amazonaws.com"
],
"i-26008355": [
"ec2-75-101-157-248.compute-1.amazonaws.com"
],
"i-2ff6135e": [
"ec2-54-242-36-133.compute-1.amazonaws.com"
],
"i-3cbc6d50": [
"ec2-54-234-233-19.compute-1.amazonaws.com"
],
"i-3e9a7f5b": [
"ec2-54-224-92-80.compute-1.amazonaws.com"
],
"i-43f6a533": [
"ec2-54-235-143-131.compute-1.amazonaws.com"
],
"i-45906822": [
"ec2-23-21-100-222.compute-1.amazonaws.com"
],
"i-508c1923": [
"ec2-23-23-130-201.compute-1.amazonaws.com"
],
"i-52970021": [
"ec2-23-23-169-133.compute-1.amazonaws.com"
],
"i-57cc2c25": [
"ec2-54-225-229-159.compute-1.amazonaws.com"
],
"i-59f23536": [
"ec2-75-101-128-47.compute-1.amazonaws.com"
],
"i-7012b200": [
"ec2-107-22-249-212.compute-1.amazonaws.com"
],
"i-73fead03": [
"ec2-54-235-143-132.compute-1.amazonaws.com"
],
"i-75faa905": [
"ec2-54-235-143-133.compute-1.amazonaws.com"
],
"i-76e49b0e": [
"ec2-75-101-128-224.compute-1.amazonaws.com"
],
"i-78c9450b": [
"ec2-54-225-88-116.compute-1.amazonaws.com"
],
"i-7aa18911": [
"ec2-54-211-252-32.compute-1.amazonaws.com"
],
"i-7dfdae0d": [
"ec2-54-235-143-134.compute-1.amazonaws.com"
],
"i-8559d6fa": [
"ec2-23-21-224-105.compute-1.amazonaws.com"
],
"i-899768e4": [
"ec2-54-234-3-7.compute-1.amazonaws.com"
],
"i-918130fb": [
"ec2-174-129-171-101.compute-1.amazonaws.com"
],
"i-99ce0ceb": [
"ec2-107-22-234-92.compute-1.amazonaws.com"
],
"i-9a450df8": [
"ec2-50-19-184-148.compute-1.amazonaws.com"
],
"i-9fce0ced": [
"ec2-107-20-176-139.compute-1.amazonaws.com"
],
"i-a80682c4": [
"ec2-54-235-65-26.compute-1.amazonaws.com"
],
"i-b43ab5df": [
"ec2-174-129-140-30.compute-1.amazonaws.com"
],
"i-baa893c2": [
"ec2-23-23-170-30.compute-1.amazonaws.com"
],
"i-bc23a0cf": [
"ec2-75-101-159-82.compute-1.amazonaws.com"
],
"i-bed948cd": [
"ec2-54-235-112-3.compute-1.amazonaws.com"
],
"i-c200c4a8": [
"ec2-54-227-30-105.compute-1.amazonaws.com"
],
"i-c69ae2be": [
"ec2-23-21-133-17.compute-1.amazonaws.com"
],
"i-c6d33fa3": [
"ec2-54-226-100-117.compute-1.amazonaws.com"
],
"i-cc4d2abf": [
"ec2-107-20-160-49.compute-1.amazonaws.com"
],
"i-cc9c3fbc": [
"ec2-54-243-146-75.compute-1.amazonaws.com"
],
"i-d01dacb3": [
"ec2-54-234-218-33.compute-1.amazonaws.com"
],
"i-da6631b3": [
"ec2-54-226-227-106.compute-1.amazonaws.com"
],
"i-dc6631b5": [
"ec2-54-227-113-75.compute-1.amazonaws.com"
],
"i-f005a580": [
"ec2-107-22-241-13.compute-1.amazonaws.com"
],
"i-f605a586": [
"ec2-107-22-247-88.compute-1.amazonaws.com"
],
"i-f805a588": [
"ec2-107-22-248-113.compute-1.amazonaws.com"
],
"i-f9829894": [
"ec2-54-225-172-84.compute-1.amazonaws.com"
],
"inf.axialmarket.com": [
"ec2-54-225-229-159.compute-1.amazonaws.com"
],
"jeffnew.axialmarket.com": [
"ec2-107-22-234-180.compute-1.amazonaws.com"
],
"jenkins.axialmarket.com": [
"ec2-23-21-224-105.compute-1.amazonaws.com"
],
"jump.axialmarket.com": [
"ec2-23-23-169-133.compute-1.amazonaws.com"
],
"key_Dana_Spiegel": [
"ec2-50-19-184-148.compute-1.amazonaws.com"
],
"key_bah-20130614": [
"ec2-54-234-218-33.compute-1.amazonaws.com",
"ec2-54-226-244-191.compute-1.amazonaws.com"
],
"key_herby-axial-20130903": [
"ec2-54-224-92-80.compute-1.amazonaws.com"
],
"key_herbyg-axial-201308": [
"ec2-54-211-252-32.compute-1.amazonaws.com",
"ec2-54-234-3-7.compute-1.amazonaws.com"
],
"key_ike-20120322": [
"ec2-23-21-100-222.compute-1.amazonaws.com",
"ec2-23-21-57-109.compute-1.amazonaws.com",
"ec2-75-101-128-224.compute-1.amazonaws.com",
"ec2-23-21-133-17.compute-1.amazonaws.com",
"ec2-23-23-168-208.compute-1.amazonaws.com",
"ec2-23-23-170-30.compute-1.amazonaws.com",
"ec2-75-101-129-169.compute-1.amazonaws.com",
"ec2-23-21-224-105.compute-1.amazonaws.com",
"ec2-54-242-36-133.compute-1.amazonaws.com",
"ec2-107-22-234-22.compute-1.amazonaws.com",
"ec2-107-22-234-180.compute-1.amazonaws.com",
"ec2-107-22-241-13.compute-1.amazonaws.com",
"ec2-107-22-247-88.compute-1.amazonaws.com",
"ec2-107-22-248-113.compute-1.amazonaws.com",
"ec2-107-22-249-212.compute-1.amazonaws.com",
"ec2-54-243-146-75.compute-1.amazonaws.com",
"ec2-54-235-143-131.compute-1.amazonaws.com",
"ec2-54-235-143-133.compute-1.amazonaws.com",
"ec2-54-235-143-132.compute-1.amazonaws.com",
"ec2-54-235-143-134.compute-1.amazonaws.com",
"ec2-54-235-143-162.compute-1.amazonaws.com",
"ec2-75-101-157-248.compute-1.amazonaws.com",
"ec2-75-101-159-82.compute-1.amazonaws.com",
"ec2-54-225-88-116.compute-1.amazonaws.com",
"ec2-23-23-169-133.compute-1.amazonaws.com",
"ec2-54-235-112-3.compute-1.amazonaws.com",
"ec2-54-225-229-159.compute-1.amazonaws.com",
"ec2-107-22-234-92.compute-1.amazonaws.com",
"ec2-107-20-176-139.compute-1.amazonaws.com",
"ec2-54-225-172-84.compute-1.amazonaws.com"
],
"key_matt-20120423": [
"ec2-54-226-227-106.compute-1.amazonaws.com",
"ec2-54-227-113-75.compute-1.amazonaws.com",
"ec2-54-235-65-26.compute-1.amazonaws.com",
"ec2-174-129-171-101.compute-1.amazonaws.com",
"ec2-54-234-233-19.compute-1.amazonaws.com",
"ec2-174-129-140-30.compute-1.amazonaws.com",
"ec2-54-227-30-105.compute-1.amazonaws.com",
"ec2-54-226-100-117.compute-1.amazonaws.com"
],
"key_mike-20121126": [
"ec2-75-101-128-47.compute-1.amazonaws.com",
"ec2-23-23-130-201.compute-1.amazonaws.com",
"ec2-107-20-160-49.compute-1.amazonaws.com"
],
"logstore1.axialmarket.com": [
"ec2-75-101-129-169.compute-1.amazonaws.com"
],
"logstore2.axialmarket.com": [
"ec2-54-235-112-3.compute-1.amazonaws.com"
],
"mattnew.axialmarket.com": [
"ec2-107-22-241-13.compute-1.amazonaws.com"
],
"monitor0.axialmarket.com": [
"ec2-54-235-65-26.compute-1.amazonaws.com"
],
"mx0.axialmarket.com": [
"ec2-23-21-57-109.compute-1.amazonaws.com"
],
"mx0a.axialmarket.com": [
"ec2-23-21-224-105.compute-1.amazonaws.com"
],
"mx1.axialmarket.com": [
"ec2-75-101-128-47.compute-1.amazonaws.com"
],
"mx2.axialmarket.com": [
"ec2-75-101-128-224.compute-1.amazonaws.com"
],
"mx5.axialmarket.com": [
"ec2-75-101-129-169.compute-1.amazonaws.com"
],
"pak.axialmarket.com": [
"ec2-54-242-36-133.compute-1.amazonaws.com"
],
"pak0.axialmarket.com": [
"ec2-54-242-36-133.compute-1.amazonaws.com"
],
"poundtest1.axialmarket.com": [
"ec2-107-20-160-49.compute-1.amazonaws.com"
],
"production-db7": [
"production-db7.co735munpzcw.us-east-1.rds.amazonaws.com"
],
"production-db7-rdssnap-p4hsx77hy8l5zqj": [
"production-db7-rdssnap-p4hsx77hy8l5zqj.co735munpzcw.us-east-1.rds.amazonaws.com"
],
"production-readonly-db7": [
"production-readonly-db7.co735munpzcw.us-east-1.rds.amazonaws.com"
],
"rabbit.axialmarket.com": [
"ec2-50-19-184-148.compute-1.amazonaws.com"
],
"rds_mysql": [
"dev11-20120311.co735munpzcw.us-east-1.rds.amazonaws.com",
"dev11-20130828.co735munpzcw.us-east-1.rds.amazonaws.com",
"dev11-20130903-dab.co735munpzcw.us-east-1.rds.amazonaws.com",
"production-db7.co735munpzcw.us-east-1.rds.amazonaws.com",
"production-db7-rdssnap-p4hsx77hy8l5zqj.co735munpzcw.us-east-1.rds.amazonaws.com",
"production-readonly-db7.co735munpzcw.us-east-1.rds.amazonaws.com",
"web-mktg-1.co735munpzcw.us-east-1.rds.amazonaws.com"
],
"rds_parameter_group_axialmarket-5-5": [
"dev11-20120311.co735munpzcw.us-east-1.rds.amazonaws.com",
"production-db7.co735munpzcw.us-east-1.rds.amazonaws.com",
"production-readonly-db7.co735munpzcw.us-east-1.rds.amazonaws.com"
],
"rds_parameter_group_default_mysql5_1": [
"web-mktg-1.co735munpzcw.us-east-1.rds.amazonaws.com"
],
"rds_parameter_group_default_mysql5_5": [
"dev11-20130828.co735munpzcw.us-east-1.rds.amazonaws.com"
],
"rds_parameter_group_mysqldump": [
"dev11-20130903-dab.co735munpzcw.us-east-1.rds.amazonaws.com",
"production-db7-rdssnap-p4hsx77hy8l5zqj.co735munpzcw.us-east-1.rds.amazonaws.com"
],
"releng0.axialmarket.com": [
"ec2-23-21-100-222.compute-1.amazonaws.com"
],
"releng1.axialmarket.com": [
"ec2-23-21-133-17.compute-1.amazonaws.com"
],
"rexnew.axialmarket.com": [
"ec2-54-235-143-162.compute-1.amazonaws.com"
],
"rollupy0.axialmarket.com": [
"ec2-54-225-172-84.compute-1.amazonaws.com"
],
"security_group_MTA": [
"ec2-75-101-128-47.compute-1.amazonaws.com",
"ec2-23-21-57-109.compute-1.amazonaws.com",
"ec2-75-101-128-224.compute-1.amazonaws.com",
"ec2-23-21-224-105.compute-1.amazonaws.com"
],
"security_group_WWW-PROD-2013": [
"ec2-75-101-157-248.compute-1.amazonaws.com",
"ec2-75-101-159-82.compute-1.amazonaws.com"
],
"security_group_backup2012": [
"ec2-23-23-170-30.compute-1.amazonaws.com"
],
"security_group_dataeng-test": [
"ec2-54-224-92-80.compute-1.amazonaws.com"
],
"security_group_development-2013-Jan": [
"ec2-54-226-227-106.compute-1.amazonaws.com",
"ec2-54-227-113-75.compute-1.amazonaws.com",
"ec2-174-129-171-101.compute-1.amazonaws.com",
"ec2-54-234-233-19.compute-1.amazonaws.com",
"ec2-54-234-218-33.compute-1.amazonaws.com",
"ec2-54-226-244-191.compute-1.amazonaws.com",
"ec2-174-129-140-30.compute-1.amazonaws.com",
"ec2-54-227-30-105.compute-1.amazonaws.com",
"ec2-54-226-100-117.compute-1.amazonaws.com",
"ec2-54-234-3-7.compute-1.amazonaws.com",
"ec2-107-22-234-22.compute-1.amazonaws.com",
"ec2-107-22-234-180.compute-1.amazonaws.com",
"ec2-107-22-241-13.compute-1.amazonaws.com",
"ec2-107-22-247-88.compute-1.amazonaws.com",
"ec2-107-22-248-113.compute-1.amazonaws.com",
"ec2-107-22-249-212.compute-1.amazonaws.com",
"ec2-54-243-146-75.compute-1.amazonaws.com",
"ec2-54-235-143-162.compute-1.amazonaws.com",
"ec2-54-225-88-116.compute-1.amazonaws.com",
"ec2-23-23-130-201.compute-1.amazonaws.com",
"ec2-107-20-160-49.compute-1.amazonaws.com",
"ec2-107-22-234-92.compute-1.amazonaws.com",
"ec2-107-20-176-139.compute-1.amazonaws.com"
],
"security_group_development-summer2012": [
"dev11-20120311.co735munpzcw.us-east-1.rds.amazonaws.com",
"dev11-20130828.co735munpzcw.us-east-1.rds.amazonaws.com",
"dev11-20130903-dab.co735munpzcw.us-east-1.rds.amazonaws.com",
"production-db7-rdssnap-p4hsx77hy8l5zqj.co735munpzcw.us-east-1.rds.amazonaws.com",
"production-readonly-db7.co735munpzcw.us-east-1.rds.amazonaws.com"
],
"security_group_development2012July": [
"ec2-23-23-168-208.compute-1.amazonaws.com"
],
"security_group_inf-mgmt-2013": [
"ec2-54-225-229-159.compute-1.amazonaws.com"
],
"security_group_jump": [
"ec2-23-23-169-133.compute-1.amazonaws.com"
],
"security_group_monitor-GOD-2013": [
"ec2-54-235-65-26.compute-1.amazonaws.com"
],
"security_group_pak-internal": [
"ec2-54-242-36-133.compute-1.amazonaws.com"
],
"security_group_production": [
"ec2-50-19-184-148.compute-1.amazonaws.com",
"production-db7.co735munpzcw.us-east-1.rds.amazonaws.com"
],
"security_group_production-NEWWORLD-201202": [
"ec2-54-235-143-131.compute-1.amazonaws.com",
"ec2-54-235-143-133.compute-1.amazonaws.com",
"ec2-54-235-143-132.compute-1.amazonaws.com",
"ec2-54-235-143-134.compute-1.amazonaws.com",
"ec2-54-225-172-84.compute-1.amazonaws.com"
],
"security_group_production-awx": [
"ec2-54-211-252-32.compute-1.amazonaws.com"
],
"security_group_releng20120404": [
"ec2-23-21-100-222.compute-1.amazonaws.com",
"ec2-23-21-133-17.compute-1.amazonaws.com"
],
"security_group_util-20121011": [
"ec2-75-101-129-169.compute-1.amazonaws.com",
"ec2-54-235-112-3.compute-1.amazonaws.com"
],
"security_group_www-mktg": [
"web-mktg-1.co735munpzcw.us-east-1.rds.amazonaws.com"
],
"stevenew.axialmarket.com": [
"ec2-107-22-234-92.compute-1.amazonaws.com"
],
"tag_Environment_Production": [
"ec2-50-19-184-148.compute-1.amazonaws.com"
],
"tag_Name_INF-umgmt1": [
"ec2-54-225-229-159.compute-1.amazonaws.com"
],
"tag_Name_NEWWORLD-PROD-app1": [
"ec2-54-235-143-131.compute-1.amazonaws.com"
],
"tag_Name_NEWWORLD-PROD-app2": [
"ec2-54-235-143-132.compute-1.amazonaws.com"
],
"tag_Name_NEWWORLD-PROD-worker1": [
"ec2-54-235-143-133.compute-1.amazonaws.com"
],
"tag_Name_NEWWORLD-PROD-worker2": [
"ec2-54-235-143-134.compute-1.amazonaws.com"
],
"tag_Name_NEWWORLD-bah": [
"ec2-107-20-176-139.compute-1.amazonaws.com"
],
"tag_Name_NEWWORLD-bennew": [
"ec2-54-243-146-75.compute-1.amazonaws.com"
],
"tag_Name_NEWWORLD-dabnew": [
"ec2-107-22-248-113.compute-1.amazonaws.com"
],
"tag_Name_NEWWORLD-dannew": [
"ec2-107-22-247-88.compute-1.amazonaws.com"
],
"tag_Name_NEWWORLD-jeffnew": [
"ec2-107-22-234-180.compute-1.amazonaws.com"
],
"tag_Name_NEWWORLD-jumphost-2": [
"ec2-23-23-169-133.compute-1.amazonaws.com"
],
"tag_Name_NEWWORLD-mattnew": [
"ec2-107-22-241-13.compute-1.amazonaws.com"
],
"tag_Name_NEWWORLD-poundtest1": [
"ec2-107-20-160-49.compute-1.amazonaws.com"
],
"tag_Name_NEWWORLD-poundtest1_": [
"ec2-107-20-160-49.compute-1.amazonaws.com"
],
"tag_Name_NEWWORLD-rexnew": [
"ec2-54-235-143-162.compute-1.amazonaws.com"
],
"tag_Name_NEWWORLD-stevenew-replace": [
"ec2-107-22-234-92.compute-1.amazonaws.com"
],
"tag_Name_NEWWORLD-tannernew": [
"ec2-23-23-130-201.compute-1.amazonaws.com"
],
"tag_Name_NEWWORLD-thomasnew-2": [
"ec2-54-225-88-116.compute-1.amazonaws.com"
],
"tag_Name_NEWWORLD-willnew": [
"ec2-107-22-234-22.compute-1.amazonaws.com"
],
"tag_Name_NEWWORLD-worker1devnew": [
"ec2-107-22-249-212.compute-1.amazonaws.com"
],
"tag_Name_WWW-TEST": [
"ec2-54-234-233-19.compute-1.amazonaws.com"
],
"tag_Name_WWW1-MKTG": [
"ec2-75-101-157-248.compute-1.amazonaws.com"
],
"tag_Name_WWW2-MKTG": [
"ec2-75-101-159-82.compute-1.amazonaws.com"
],
"tag_Name_ansible": [
"ec2-54-226-227-106.compute-1.amazonaws.com",
"ec2-54-227-113-75.compute-1.amazonaws.com"
],
"tag_Name_app2t_development_axialmarket_com": [
"ec2-23-23-168-208.compute-1.amazonaws.com"
],
"tag_Name_awx": [
"ec2-54-211-252-32.compute-1.amazonaws.com"
],
"tag_Name_axtdev2": [
"ec2-54-234-3-7.compute-1.amazonaws.com"
],
"tag_Name_backup1": [
"ec2-23-23-170-30.compute-1.amazonaws.com"
],
"tag_Name_build_server": [
"ec2-54-226-244-191.compute-1.amazonaws.com"
],
"tag_Name_cburke0": [
"ec2-54-226-100-117.compute-1.amazonaws.com"
],
"tag_Name_dataeng_test1": [
"ec2-54-224-92-80.compute-1.amazonaws.com"
],
"tag_Name_firecrow-dev": [
"ec2-54-227-30-105.compute-1.amazonaws.com"
],
"tag_Name_herby0": [
"ec2-174-129-140-30.compute-1.amazonaws.com"
],
"tag_Name_logstore1": [
"ec2-75-101-129-169.compute-1.amazonaws.com"
],
"tag_Name_logstore2": [
"ec2-54-235-112-3.compute-1.amazonaws.com"
],
"tag_Name_mx0": [
"ec2-23-21-57-109.compute-1.amazonaws.com"
],
"tag_Name_mx0a": [
"ec2-23-21-224-105.compute-1.amazonaws.com"
],
"tag_Name_mx1_new": [
"ec2-75-101-128-47.compute-1.amazonaws.com"
],
"tag_Name_mx2": [
"ec2-75-101-128-224.compute-1.amazonaws.com"
],
"tag_Name_new-testapp1": [
"ec2-174-129-171-101.compute-1.amazonaws.com"
],
"tag_Name_pak0_axialmarket_com": [
"ec2-54-242-36-133.compute-1.amazonaws.com"
],
"tag_Name_rabbit_axialmarket_com": [
"ec2-50-19-184-148.compute-1.amazonaws.com"
],
"tag_Name_releng0": [
"ec2-23-21-100-222.compute-1.amazonaws.com"
],
"tag_Name_releng1": [
"ec2-23-21-133-17.compute-1.amazonaws.com"
],
"tag_Name_rollupy0-PROD": [
"ec2-54-225-172-84.compute-1.amazonaws.com"
],
"tag_Name_tannernew_": [
"ec2-23-23-130-201.compute-1.amazonaws.com"
],
"tag_Name_testapp1": [
"ec2-54-234-218-33.compute-1.amazonaws.com"
],
"tag_Name_zabbix-upgrade": [
"ec2-54-235-65-26.compute-1.amazonaws.com"
],
"tag_Use_RabbitMQ__celerycam__celerybeat__celeryd__postfix": [
"ec2-50-19-184-148.compute-1.amazonaws.com"
],
"tag_environment_dev": [
"ec2-54-234-3-7.compute-1.amazonaws.com"
],
"tag_environment_production": [
"ec2-54-211-252-32.compute-1.amazonaws.com"
],
"tag_id_awx": [
"ec2-54-211-252-32.compute-1.amazonaws.com"
],
"tag_id_axtdev2": [
"ec2-54-234-3-7.compute-1.amazonaws.com"
],
"tag_os_ubuntu": [
"ec2-54-211-252-32.compute-1.amazonaws.com",
"ec2-54-234-3-7.compute-1.amazonaws.com"
],
"tag_primary_role_awx": [
"ec2-54-211-252-32.compute-1.amazonaws.com"
],
"tag_primary_role_dev": [
"ec2-54-234-3-7.compute-1.amazonaws.com"
],
"tag_purpose_syscleanup": [
"ec2-23-21-100-222.compute-1.amazonaws.com"
],
"tag_role_awx_": [
"ec2-54-211-252-32.compute-1.amazonaws.com"
],
"tag_role_dev_": [
"ec2-54-234-3-7.compute-1.amazonaws.com"
],
"tannernew.axialmarket.com": [
"ec2-23-23-130-201.compute-1.amazonaws.com"
],
"testapp1.axialmarket.com": [
"ec2-174-129-171-101.compute-1.amazonaws.com"
],
"testapp2.axialmarket.com": [
"ec2-54-234-218-33.compute-1.amazonaws.com"
],
"testnoelb.axialmarket.com": [
"ec2-107-20-160-49.compute-1.amazonaws.com"
],
"testworker1.axialmarket.com": [
"ec2-107-22-249-212.compute-1.amazonaws.com"
],
"thomasnew.axialmarket.com": [
"ec2-54-225-88-116.compute-1.amazonaws.com"
],
"type_db_m1_medium": [
"web-mktg-1.co735munpzcw.us-east-1.rds.amazonaws.com"
],
"type_db_m1_xlarge": [
"dev11-20120311.co735munpzcw.us-east-1.rds.amazonaws.com",
"dev11-20130828.co735munpzcw.us-east-1.rds.amazonaws.com",
"dev11-20130903-dab.co735munpzcw.us-east-1.rds.amazonaws.com",
"production-db7.co735munpzcw.us-east-1.rds.amazonaws.com",
"production-db7-rdssnap-p4hsx77hy8l5zqj.co735munpzcw.us-east-1.rds.amazonaws.com",
"production-readonly-db7.co735munpzcw.us-east-1.rds.amazonaws.com"
],
"type_m1_large": [
"ec2-54-235-65-26.compute-1.amazonaws.com",
"ec2-174-129-171-101.compute-1.amazonaws.com",
"ec2-54-234-218-33.compute-1.amazonaws.com",
"ec2-50-19-184-148.compute-1.amazonaws.com",
"ec2-174-129-140-30.compute-1.amazonaws.com",
"ec2-54-227-30-105.compute-1.amazonaws.com",
"ec2-54-226-100-117.compute-1.amazonaws.com",
"ec2-54-224-92-80.compute-1.amazonaws.com",
"ec2-23-23-168-208.compute-1.amazonaws.com",
"ec2-54-234-3-7.compute-1.amazonaws.com",
"ec2-107-22-234-22.compute-1.amazonaws.com",
"ec2-107-22-234-180.compute-1.amazonaws.com",
"ec2-107-22-241-13.compute-1.amazonaws.com",
"ec2-107-22-247-88.compute-1.amazonaws.com",
"ec2-107-22-248-113.compute-1.amazonaws.com",
"ec2-107-22-249-212.compute-1.amazonaws.com",
"ec2-54-243-146-75.compute-1.amazonaws.com",
"ec2-54-235-143-131.compute-1.amazonaws.com",
"ec2-54-235-143-132.compute-1.amazonaws.com",
"ec2-54-235-143-162.compute-1.amazonaws.com",
"ec2-23-23-130-201.compute-1.amazonaws.com",
"ec2-107-22-234-92.compute-1.amazonaws.com",
"ec2-107-20-176-139.compute-1.amazonaws.com"
],
"type_m1_medium": [
"ec2-54-226-227-106.compute-1.amazonaws.com",
"ec2-54-227-113-75.compute-1.amazonaws.com",
"ec2-54-234-233-19.compute-1.amazonaws.com",
"ec2-54-226-244-191.compute-1.amazonaws.com",
"ec2-23-21-100-222.compute-1.amazonaws.com",
"ec2-23-21-133-17.compute-1.amazonaws.com",
"ec2-54-211-252-32.compute-1.amazonaws.com",
"ec2-54-242-36-133.compute-1.amazonaws.com",
"ec2-75-101-157-248.compute-1.amazonaws.com",
"ec2-75-101-159-82.compute-1.amazonaws.com",
"ec2-54-225-88-116.compute-1.amazonaws.com",
"ec2-23-23-169-133.compute-1.amazonaws.com"
],
"type_m1_small": [
"ec2-75-101-129-169.compute-1.amazonaws.com",
"ec2-107-20-160-49.compute-1.amazonaws.com"
],
"type_m1_xlarge": [
"ec2-54-235-143-133.compute-1.amazonaws.com",
"ec2-54-235-143-134.compute-1.amazonaws.com",
"ec2-54-235-112-3.compute-1.amazonaws.com",
"ec2-54-225-172-84.compute-1.amazonaws.com"
],
"type_m2_2xlarge": [
"ec2-23-23-170-30.compute-1.amazonaws.com"
],
"type_t1_micro": [
"ec2-75-101-128-47.compute-1.amazonaws.com",
"ec2-23-21-57-109.compute-1.amazonaws.com",
"ec2-75-101-128-224.compute-1.amazonaws.com",
"ec2-23-21-224-105.compute-1.amazonaws.com",
"ec2-54-225-229-159.compute-1.amazonaws.com"
],
"us-east-1": [
"ec2-54-226-227-106.compute-1.amazonaws.com",
"ec2-54-227-113-75.compute-1.amazonaws.com",
"ec2-54-235-65-26.compute-1.amazonaws.com",
"ec2-174-129-171-101.compute-1.amazonaws.com",
"ec2-54-234-233-19.compute-1.amazonaws.com",
"ec2-75-101-128-47.compute-1.amazonaws.com",
"ec2-54-234-218-33.compute-1.amazonaws.com",
"ec2-54-226-244-191.compute-1.amazonaws.com",
"ec2-50-19-184-148.compute-1.amazonaws.com",
"ec2-174-129-140-30.compute-1.amazonaws.com",
"ec2-54-227-30-105.compute-1.amazonaws.com",
"ec2-23-21-100-222.compute-1.amazonaws.com",
"ec2-54-226-100-117.compute-1.amazonaws.com",
"ec2-54-224-92-80.compute-1.amazonaws.com",
"ec2-23-21-57-109.compute-1.amazonaws.com",
"ec2-75-101-128-224.compute-1.amazonaws.com",
"ec2-23-21-133-17.compute-1.amazonaws.com",
"ec2-23-23-168-208.compute-1.amazonaws.com",
"ec2-23-23-170-30.compute-1.amazonaws.com",
"ec2-54-211-252-32.compute-1.amazonaws.com",
"ec2-54-234-3-7.compute-1.amazonaws.com",
"ec2-75-101-129-169.compute-1.amazonaws.com",
"ec2-23-21-224-105.compute-1.amazonaws.com",
"ec2-54-242-36-133.compute-1.amazonaws.com",
"ec2-107-22-234-22.compute-1.amazonaws.com",
"ec2-107-22-234-180.compute-1.amazonaws.com",
"ec2-107-22-241-13.compute-1.amazonaws.com",
"ec2-107-22-247-88.compute-1.amazonaws.com",
"ec2-107-22-248-113.compute-1.amazonaws.com",
"ec2-107-22-249-212.compute-1.amazonaws.com",
"ec2-54-243-146-75.compute-1.amazonaws.com",
"ec2-54-235-143-131.compute-1.amazonaws.com",
"ec2-54-235-143-133.compute-1.amazonaws.com",
"ec2-54-235-143-132.compute-1.amazonaws.com",
"ec2-54-235-143-134.compute-1.amazonaws.com",
"ec2-54-235-143-162.compute-1.amazonaws.com",
"ec2-75-101-157-248.compute-1.amazonaws.com",
"ec2-75-101-159-82.compute-1.amazonaws.com",
"ec2-54-225-88-116.compute-1.amazonaws.com",
"ec2-23-23-130-201.compute-1.amazonaws.com",
"ec2-23-23-169-133.compute-1.amazonaws.com",
"ec2-54-235-112-3.compute-1.amazonaws.com",
"ec2-107-20-160-49.compute-1.amazonaws.com",
"ec2-54-225-229-159.compute-1.amazonaws.com",
"ec2-107-22-234-92.compute-1.amazonaws.com",
"ec2-107-20-176-139.compute-1.amazonaws.com",
"ec2-54-225-172-84.compute-1.amazonaws.com",
"dev11-20120311.co735munpzcw.us-east-1.rds.amazonaws.com",
"dev11-20130828.co735munpzcw.us-east-1.rds.amazonaws.com",
"dev11-20130903-dab.co735munpzcw.us-east-1.rds.amazonaws.com",
"production-db7.co735munpzcw.us-east-1.rds.amazonaws.com",
"production-db7-rdssnap-p4hsx77hy8l5zqj.co735munpzcw.us-east-1.rds.amazonaws.com",
"production-readonly-db7.co735munpzcw.us-east-1.rds.amazonaws.com",
"web-mktg-1.co735munpzcw.us-east-1.rds.amazonaws.com"
],
"us-east-1c": [
"ec2-23-21-100-222.compute-1.amazonaws.com",
"ec2-23-23-168-208.compute-1.amazonaws.com",
"ec2-75-101-129-169.compute-1.amazonaws.com",
"ec2-107-22-249-212.compute-1.amazonaws.com",
"ec2-54-235-143-132.compute-1.amazonaws.com",
"ec2-54-235-143-134.compute-1.amazonaws.com",
"ec2-75-101-157-248.compute-1.amazonaws.com",
"ec2-54-235-112-3.compute-1.amazonaws.com",
"ec2-107-20-160-49.compute-1.amazonaws.com",
"ec2-54-225-172-84.compute-1.amazonaws.com",
"dev11-20130828.co735munpzcw.us-east-1.rds.amazonaws.com",
"dev11-20130903-dab.co735munpzcw.us-east-1.rds.amazonaws.com",
"production-db7-rdssnap-p4hsx77hy8l5zqj.co735munpzcw.us-east-1.rds.amazonaws.com"
],
"us-east-1d": [
"ec2-54-226-227-106.compute-1.amazonaws.com",
"ec2-54-227-113-75.compute-1.amazonaws.com",
"ec2-54-235-65-26.compute-1.amazonaws.com",
"ec2-174-129-171-101.compute-1.amazonaws.com",
"ec2-54-234-233-19.compute-1.amazonaws.com",
"ec2-75-101-128-47.compute-1.amazonaws.com",
"ec2-54-234-218-33.compute-1.amazonaws.com",
"ec2-54-226-244-191.compute-1.amazonaws.com",
"ec2-50-19-184-148.compute-1.amazonaws.com",
"ec2-174-129-140-30.compute-1.amazonaws.com",
"ec2-54-227-30-105.compute-1.amazonaws.com",
"ec2-54-226-100-117.compute-1.amazonaws.com",
"ec2-54-224-92-80.compute-1.amazonaws.com",
"ec2-23-21-57-109.compute-1.amazonaws.com",
"ec2-75-101-128-224.compute-1.amazonaws.com",
"ec2-23-21-133-17.compute-1.amazonaws.com",
"ec2-54-211-252-32.compute-1.amazonaws.com",
"ec2-54-234-3-7.compute-1.amazonaws.com",
"ec2-23-21-224-105.compute-1.amazonaws.com",
"ec2-54-242-36-133.compute-1.amazonaws.com",
"ec2-107-22-234-22.compute-1.amazonaws.com",
"ec2-107-22-234-180.compute-1.amazonaws.com",
"ec2-107-22-241-13.compute-1.amazonaws.com",
"ec2-107-22-247-88.compute-1.amazonaws.com",
"ec2-107-22-248-113.compute-1.amazonaws.com",
"ec2-54-243-146-75.compute-1.amazonaws.com",
"ec2-54-235-143-131.compute-1.amazonaws.com",
"ec2-54-235-143-133.compute-1.amazonaws.com",
"ec2-54-235-143-162.compute-1.amazonaws.com",
"ec2-75-101-159-82.compute-1.amazonaws.com",
"ec2-54-225-88-116.compute-1.amazonaws.com",
"ec2-23-23-130-201.compute-1.amazonaws.com",
"ec2-23-23-169-133.compute-1.amazonaws.com",
"ec2-54-225-229-159.compute-1.amazonaws.com",
"ec2-107-22-234-92.compute-1.amazonaws.com",
"ec2-107-20-176-139.compute-1.amazonaws.com",
"dev11-20120311.co735munpzcw.us-east-1.rds.amazonaws.com",
"web-mktg-1.co735munpzcw.us-east-1.rds.amazonaws.com"
],
"us-east-1e": [
"ec2-23-23-170-30.compute-1.amazonaws.com",
"production-db7.co735munpzcw.us-east-1.rds.amazonaws.com",
"production-readonly-db7.co735munpzcw.us-east-1.rds.amazonaws.com"
],
"web-mktg-1": [
"web-mktg-1.co735munpzcw.us-east-1.rds.amazonaws.com"
],
"web1.axialmarket.com": [
"ec2-75-101-157-248.compute-1.amazonaws.com"
],
"web2.axialmarket.com": [
"ec2-75-101-159-82.compute-1.amazonaws.com"
],
"willnew.axialmarket.com": [
"ec2-107-22-234-22.compute-1.amazonaws.com"
],
"worker1new.axialmarket.com": [
"ec2-54-235-143-133.compute-1.amazonaws.com"
],
"worker1newdev.axialmarket.com": [
"ec2-107-22-249-212.compute-1.amazonaws.com"
],
"worker2new.axialmarket.com": [
"ec2-54-235-143-134.compute-1.amazonaws.com"
],
"www-test.axialmarket.com": [
"ec2-54-234-233-19.compute-1.amazonaws.com"
],
'_meta': {
'hostvars': {}
}
}
# Per-host variables returned for the --host option. This static snapshot
# carries none, so every host lookup yields an empty mapping.
host_vars = {
}
if __name__ == '__main__':
    # ec2.py-style dynamic inventory CLI:
    #   --list        dump the full group->hosts map (inv_list)
    #   --host NAME   dump per-host vars (always empty here)
    # ``print(...)`` (function form) is valid on Python 2 and 3 alike; the
    # original Python 2 ``print`` statement failed to parse on Python 3.
    parser = optparse.OptionParser()
    parser.add_option('--list', action='store_true', dest='list')
    parser.add_option('--host', dest='hostname', default='')
    options, args = parser.parse_args()
    if options.list:
        print(json.dumps(inv_list, indent=4))
    elif options.hostname:
        print(json.dumps(host_vars, indent=4))
    else:
        print(json.dumps({}, indent=4))

View File

@ -1,63 +0,0 @@
#!/usr/bin/env python
# Python
import json
import optparse
import os
# Number of synthetic hosts to generate; overridable via the NHOSTS env var.
nhosts = int(os.environ.get('NHOSTS', 100))

# Build an ec2.py-style inventory dict. Each host joins modulus-based groups
# (evens/odds, threes ... tens) plus a numeric bucket hierarchy linked via
# ``children``: by-1000s -> by-100s -> by-10s.
inv_list = {
    '_meta': {
        'hostvars': {},
    },
}
# ``range`` and ``//`` replace Python 2-only ``xrange`` and true-division
# ``/`` (which yields floats on Python 3 and broke the %d formats); both are
# behavior-identical under Python 2.
for n in range(nhosts):
    hostname = 'host-%08d.example.com' % n
    group_evens_odds = 'evens.example.com' if n % 2 == 0 else 'odds.example.com'
    group_threes = 'threes.example.com' if n % 3 == 0 else ''
    group_fours = 'fours.example.com' if n % 4 == 0 else ''
    group_fives = 'fives.example.com' if n % 5 == 0 else ''
    group_sixes = 'sixes.example.com' if n % 6 == 0 else ''
    group_sevens = 'sevens.example.com' if n % 7 == 0 else ''
    group_eights = 'eights.example.com' if n % 8 == 0 else ''
    group_nines = 'nines.example.com' if n % 9 == 0 else ''
    group_tens = 'tens.example.com' if n % 10 == 0 else ''
    group_by_10s = 'group-%07dX.example.com' % (n // 10)
    group_by_100s = 'group-%06dXX.example.com' % (n // 100)
    group_by_1000s = 'group-%05dXXX.example.com' % (n // 1000)
    for group in [group_evens_odds, group_threes, group_fours, group_fives,
                  group_sixes, group_sevens, group_eights, group_nines,
                  group_tens, group_by_10s]:
        if not group:
            continue  # modulus did not match; host is not in this group
        if group in inv_list:
            inv_list[group]['hosts'].append(hostname)
        else:
            inv_list[group] = {'hosts': [hostname], 'children': [],
                               'vars': {'group_prefix': group.split('.')[0]}}
    # The 100s/1000s buckets hold no hosts directly; they only parent the
    # next level down through ``children``.
    if group_by_1000s not in inv_list:
        inv_list[group_by_1000s] = {'hosts': [], 'children': [],
                                    'vars': {'group_prefix': group_by_1000s.split('.')[0]}}
    if group_by_100s not in inv_list:
        inv_list[group_by_100s] = {'hosts': [], 'children': [],
                                   'vars': {'group_prefix': group_by_100s.split('.')[0]}}
    if group_by_100s not in inv_list[group_by_1000s]['children']:
        inv_list[group_by_1000s]['children'].append(group_by_100s)
    if group_by_10s not in inv_list[group_by_100s]['children']:
        inv_list[group_by_100s]['children'].append(group_by_10s)
    inv_list['_meta']['hostvars'][hostname] = {
        'ansible_ssh_user': 'example',
        'ansible_connection': 'local',
        'host_prefix': hostname.split('.')[0],
        'host_id': n,
    }
if __name__ == '__main__':
    # ec2.py-style dynamic inventory CLI:
    #   --list        dump all groups and _meta hostvars
    #   --host NAME   dump the hostvars for one generated host
    # ``print(...)`` (function form) works on Python 2 and 3; the original
    # Python 2 ``print`` statement was a syntax error on Python 3.
    parser = optparse.OptionParser()
    parser.add_option('--list', action='store_true', dest='list')
    parser.add_option('--host', dest='hostname', default='')
    options, args = parser.parse_args()
    if options.list:
        print(json.dumps(inv_list, indent=4))
    elif options.hostname:
        print(json.dumps(inv_list['_meta']['hostvars'][options.hostname], indent=4))
    else:
        print(json.dumps({}, indent=4))

View File

@ -9,68 +9,86 @@ import mock
from django.core.management.base import CommandError
# AWX
from awx.main.management.commands.inventory_import import (
Command
)
from awx.main.management.commands import inventory_import
from awx.main.models import Inventory, Host, Group
from awx.main.utils.mem_inventory import dict_to_mem_data
TEST_INVENTORY_INI = '''\
# Some comment about blah blah blah...
[webservers]
web1.example.com ansible_ssh_host=w1.example.net
web2.example.com
web3.example.com:1022
[webservers:vars] # Comment on a section
webvar=blah # Comment on an option
[dbservers]
db1.example.com
db2.example.com
[dbservers:vars]
dbvar=ugh
[servers:children]
webservers
dbservers
[servers:vars]
varb=B
[all:vars]
vara=A
[others]
10.11.12.13
10.12.14.16:8022
fe80::1610:9fff:fedd:654b
[fe80::1610:9fff:fedd:b654]:1022
::1
'''
TEST_INVENTORY_CONTENT = {
"_meta": {
"hostvars": {}
},
"all": {
"children": [
"others",
"servers",
"ungrouped"
],
"vars": {
"vara": "A"
}
},
"dbservers": {
"hosts": [
"db1.example.com",
"db2.example.com"
],
"vars": {
"dbvar": "ugh"
}
},
"others": {
"hosts": {
"10.11.12.13": {},
"10.12.14.16": {"ansible_port": 8022},
"::1": {},
"fe80::1610:9fff:fedd:654b": {},
"fe80::1610:9fff:fedd:b654": {"ansible_port": 1022}
}
},
"servers": {
"children": [
"dbservers",
"webservers"
],
"vars": {
"varb": "B"
}
},
"ungrouped": {},
"webservers": {
"hosts": {
"web1.example.com": {
"ansible_ssh_host": "w1.example.net"
},
"web2.example.com": {},
"web3.example.com": {
"ansible_port": 1022
}
},
"vars": {
"webvar": "blah"
}
}
}
@pytest.fixture(scope='session')
def test_dir(tmpdir_factory):
    # Session-scoped temp directory for generated inventory files.
    # ``numbered=False`` keeps the path fixed ('inv_files') instead of
    # appending a counter, so the location is stable for the whole session.
    return tmpdir_factory.mktemp('inv_files', numbered=False)
TEST_MEM_OBJECTS = dict_to_mem_data(TEST_INVENTORY_CONTENT)
@pytest.fixture(scope='session')
def ini_file(test_dir):
    # Write the sample INI inventory to disk once per session and return the
    # file object, for tests that import from a real on-disk file.
    fn = test_dir.join('test_hosts')
    fn.write(TEST_INVENTORY_INI)
    return fn
def mock_logging(self):
    """No-op replacement for Command.set_logging_level used in patch decorators."""
    return None
@pytest.mark.django_db
@mock.patch.object(Command, 'check_license', mock.MagicMock())
@pytest.mark.inventory_import
@mock.patch.object(inventory_import.Command, 'check_license', mock.MagicMock())
@mock.patch.object(inventory_import.Command, 'set_logging_level', mock_logging)
class TestInvalidOptionsFunctional:
def test_invalid_options_invalid_source(self, inventory):
# Give invalid file to the command
cmd = Command()
cmd = inventory_import.Command()
with mock.patch('django.db.transaction.rollback'):
with pytest.raises(IOError) as err:
cmd.handle_noargs(
@ -78,28 +96,33 @@ class TestInvalidOptionsFunctional:
source='/tmp/pytest-of-root/pytest-7/inv_files0-invalid')
assert 'Source does not exist' in err.value.message
def test_invalid_inventory_id(self, ini_file):
cmd = Command()
def test_invalid_inventory_id(self):
cmd = inventory_import.Command()
with pytest.raises(CommandError) as err:
cmd.handle_noargs(inventory_id=42, source=ini_file.dirname)
cmd.handle_noargs(inventory_id=42, source='/notapath/shouldnotmatter')
assert 'id = 42' in err.value.message
assert 'cannot be found' in err.value.message
def test_invalid_inventory_name(self, ini_file):
cmd = Command()
def test_invalid_inventory_name(self):
cmd = inventory_import.Command()
with pytest.raises(CommandError) as err:
cmd.handle_noargs(inventory_name='fooservers', source=ini_file.dirname)
cmd.handle_noargs(inventory_name='fooservers', source='/notapath/shouldnotmatter')
assert 'name = fooservers' in err.value.message
assert 'cannot be found' in err.value.message
@pytest.mark.django_db
@mock.patch.object(Command, 'check_license', mock.MagicMock())
@pytest.mark.inventory_import
@mock.patch.object(inventory_import.Command, 'check_license', mock.MagicMock())
@mock.patch.object(inventory_import.Command, 'set_logging_level', mock_logging)
class TestINIImports:
def test_inventory_single_ini_import(self, inventory, ini_file, capsys):
cmd = Command()
r = cmd.handle_noargs(inventory_id=inventory.pk, source=ini_file.dirname)
@mock.patch.object(inventory_import.BaseLoader, 'load', mock.MagicMock(return_value=TEST_MEM_OBJECTS))
def test_inventory_single_ini_import(self, inventory, capsys):
cmd = inventory_import.Command()
r = cmd.handle_noargs(
inventory_id=inventory.pk, source=__file__,
method='backport')
out, err = capsys.readouterr()
assert r is None
assert out == ''
@ -117,10 +140,12 @@ class TestINIImports:
reloaded_inv = Inventory.objects.get(pk=inventory.pk)
assert reloaded_inv.variables_dict == {'vara': 'A'}
# Groups vars are applied to host in the newer versions
assert Host.objects.get(name='web1.example.com').variables_dict == {'ansible_ssh_host': 'w1.example.net'}
assert Host.objects.get(name='web3.example.com').variables_dict == {'ansible_ssh_port': 1022}
assert Host.objects.get(name='fe80::1610:9fff:fedd:b654').variables_dict == {'ansible_ssh_port': 1022}
assert Host.objects.get(name='10.12.14.16').variables_dict == {'ansible_ssh_port': 8022}
# Old version uses `ansible_ssh_port` but new version uses `ansible_port`
assert Host.objects.get(name='web3.example.com').variables_dict == {'ansible_port': 1022}
assert Host.objects.get(name='fe80::1610:9fff:fedd:b654').variables_dict == {'ansible_port': 1022}
assert Host.objects.get(name='10.12.14.16').variables_dict == {'ansible_port': 8022}
servers = Group.objects.get(name='servers')
assert servers.variables_dict == {'varb': 'B'}
@ -143,24 +168,53 @@ class TestINIImports:
assert invsrc.inventory_updates.count() == 1
assert invsrc.inventory_updates.first().status == 'successful'
def test_inventory_import_group_vars_file(self, inventory, ini_file, tmpdir_factory):
# Create an extra group_vars file for group webservers
gvarf = tmpdir_factory.mktemp('inv_files/group_vars', numbered=False).join('webservers')
gvarf.write('''webservers_only_variable: foobar\n''')
# Check creation of ad-hoc inventory source - this was not called with one specified
assert reloaded_inv.inventory_sources.count() == 1
assert reloaded_inv.inventory_sources.all()[0].source == 'file'
cmd = Command()
cmd.handle_noargs(inventory_id=inventory.pk, source=ini_file.dirname)
@mock.patch.object(
inventory_import, 'load_inventory_source', mock.MagicMock(
return_value=dict_to_mem_data(
{
"_meta": {
"hostvars": {"foo": {"some_hostvar": "foobar"}}
},
"all": {
"children": ["ungrouped"]
},
"ungrouped": {
"hosts": ["foo"]
}
}).all_group
)
)
def test_hostvars_are_saved(self, inventory):
cmd = inventory_import.Command()
cmd.handle_noargs(inventory_id=inventory.pk, source='doesnt matter')
assert inventory.hosts.count() == 1
h = inventory.hosts.all()[0]
assert h.name == 'foo'
assert h.variables_dict == {"some_hostvar": "foobar"}
servers = Group.objects.get(name='webservers')
assert servers.variables_dict == {'webvar': 'blah', 'webservers_only_variable': 'foobar'}
def test_inventory_import_host_vars_file(self, inventory, ini_file, tmpdir_factory):
# Create an extra host_vars file for one specific host
gvarf = tmpdir_factory.mktemp('inv_files/host_vars', numbered=False).join('web1.example.com')
gvarf.write('''host_only_variable: foobar\n''')
cmd = Command()
cmd.handle_noargs(inventory_id=inventory.pk, source=ini_file.dirname)
Host.objects.get(name='web1.example.com').variables_dict == {
'ansible_ssh_host': 'w1.example.net', 'host_only_variable': 'foobar'}
@mock.patch.object(
inventory_import, 'load_inventory_source', mock.MagicMock(
return_value=dict_to_mem_data(
{
"_meta": {
"hostvars": {}
},
"all": {
"children": ["fooland", "barland"]
},
"fooland": {
"children": ["barland"]
},
"barland": {
"children": ["fooland"]
}
}).all_group
)
)
def test_recursive_group_error(self, inventory):
cmd = inventory_import.Command()
cmd.handle_noargs(inventory_id=inventory.pk, source='doesnt matter')

View File

@ -5,16 +5,12 @@
import json
import os
import shutil
import string
import StringIO
import sys
import tempfile
import time
import urlparse
import unittest2 as unittest
# Django
from django.conf import settings
from django.core.management import call_command
from django.utils.timezone import now
from django.test.utils import override_settings
@ -36,77 +32,6 @@ TEST_PLAYBOOK = '''- hosts: test-group
command: test 2 = 2
'''
# INI inventory fixture (near-duplicate of the one in the unit-test module;
# this copy omits the '10.12.14.16:8022'-adjacent differences only where the
# source files differ).  Exercises host vars, custom ports, section comments,
# group vars, child groups, 'all' vars, and IPv4/IPv6 host forms.
TEST_INVENTORY_INI = '''\
# Some comment about blah blah blah...
[webservers]
web1.example.com ansible_ssh_host=w1.example.net
web2.example.com
web3.example.com:1022
[webservers:vars] # Comment on a section
webvar=blah # Comment on an option
[dbservers]
db1.example.com
db2.example.com
[dbservers:vars]
dbvar=ugh
[servers:children]
webservers
dbservers
[servers:vars]
varb=B
[all:vars]
vara=A
[others]
10.11.12.13
10.12.14.16:8022
fe80::1610:9fff:fedd:654b
[fe80::1610:9fff:fedd:b654]:1022
::1
'''
# Fixture exercising Ansible host-range patterns: zero-padded numeric ranges
# ([00:63]), alphabetic ranges ([a:z], [A:F]), stepped ranges ([00:08:2]),
# multiple ranges in one name, and a range combined with a port.
TEST_INVENTORY_INI_WITH_HOST_PATTERNS = '''\
[dotcom]
web[00:63].example.com ansible_ssh_user=example
dns.example.com
[dotnet]
db-[a:z].example.net
ns.example.net
[dotorg]
[A:F][0:9].example.org:1022 ansible_ssh_user=example
mx.example.org
[dotus]
lb[00:08:2].example.us even_odd=even
lb[01:09:2].example.us even_odd=odd
[dotcc]
media[0:9][0:9].example.cc
'''
# Fixture with a deliberate group-membership cycle
# (family -> parent -> child -> grandchild -> parent) used to verify the
# importer tolerates/handles recursive child-group definitions.
TEST_INVENTORY_INI_WITH_RECURSIVE_GROUPS = '''\
[family:children]
parent
[parent:children]
child
[child:children]
grandchild
[grandchild:children]
parent
'''
class BaseCommandMixin(object):
'''
@ -449,412 +374,3 @@ class CleanupActivityStreamTest(BaseCommandMixin, BaseTest):
self.assertFalse(count_after)
self.assertTrue(cleanup_elapsed < (create_elapsed / 4),
'create took %0.3fs, cleanup took %0.3fs, expected < %0.3fs' % (create_elapsed, cleanup_elapsed, create_elapsed / 4))
@unittest.skipIf(os.environ.get('SKIP_SLOW_TESTS', False), 'Skipping slow test')
class InventoryImportTest(BaseCommandMixin, BaseLiveServerTest):
'''
Test cases for inventory_import management command.
'''
def setUp(self):
super(InventoryImportTest, self).setUp()
self.start_rabbit()
self.setup_instances()
self.create_test_inventories()
self.create_test_ini()
self.create_test_license_file()
def tearDown(self):
super(InventoryImportTest, self).tearDown()
self.stop_rabbit()
def create_test_ini(self, inv_dir=None, ini_content=None):
ini_content = ini_content or TEST_INVENTORY_INI
handle, self.ini_path = tempfile.mkstemp(suffix='.txt', dir=inv_dir)
ini_file = os.fdopen(handle, 'w')
ini_file.write(ini_content)
ini_file.close()
self._temp_paths.append(self.ini_path)
def create_test_dir(self, host_names=None, group_names=None, suffix=''):
host_names = host_names or []
group_names = group_names or []
if 'all' not in group_names:
group_names.insert(0, 'all')
self.inv_dir = tempfile.mkdtemp()
self._temp_paths.append(self.inv_dir)
self.create_test_ini(self.inv_dir)
group_vars_dir = os.path.join(self.inv_dir, 'group_vars')
os.makedirs(group_vars_dir)
for group_name in group_names:
if suffix == '.json':
group_vars_content = '''{"test_group_name": "%s"}\n''' % group_name
else:
group_vars_content = '''test_group_name: %s\n''' % group_name
group_vars_file = os.path.join(group_vars_dir, '%s%s' % (group_name, suffix))
file(group_vars_file, 'wb').write(group_vars_content)
if host_names:
host_vars_dir = os.path.join(self.inv_dir, 'host_vars')
os.makedirs(host_vars_dir)
for host_name in host_names:
if suffix == '.json':
host_vars_content = '''{"test_host_name": "%s"}''' % host_name
else:
host_vars_content = '''test_host_name: %s''' % host_name
host_vars_file = os.path.join(host_vars_dir, '%s%s' % (host_name, suffix))
file(host_vars_file, 'wb').write(host_vars_content)
def check_adhoc_inventory_source(self, inventory, except_host_pks=None,
except_group_pks=None):
# Check that management command created a new inventory source and
# related inventory update.
inventory_sources = inventory.inventory_sources.filter(group=None)
self.assertEqual(inventory_sources.count(), 1)
inventory_source = inventory_sources[0]
self.assertEqual(inventory_source.source, 'file')
self.assertEqual(inventory_source.inventory_updates.count(), 1)
inventory_update = inventory_source.inventory_updates.all()[0]
self.assertEqual(inventory_update.status, 'successful')
for host in inventory.hosts.all():
if host.pk in (except_host_pks or []):
continue
source_pks = host.inventory_sources.values_list('pk', flat=True)
self.assertTrue(inventory_source.pk in source_pks)
for group in inventory.groups.all():
if group.pk in (except_group_pks or []):
continue
source_pks = group.inventory_sources.values_list('pk', flat=True)
self.assertTrue(inventory_source.pk in source_pks)
def test_dir_with_ini_file(self):
self.create_test_dir(host_names=['db1.example.com', 'db2.example.com'],
group_names=['dbservers'], suffix='')
self.test_ini_file(self.inv_dir)
self.create_test_dir(host_names=['db1.example.com', 'db2.example.com'],
group_names=['dbservers'], suffix='.yml')
self.test_ini_file(self.inv_dir)
self.create_test_dir(host_names=['db1.example.com', 'db2.example.com'],
group_names=['dbservers'], suffix='.yaml')
self.test_ini_file(self.inv_dir)
self.create_test_dir(host_names=['db1.example.com', 'db2.example.com'],
group_names=['dbservers'], suffix='.json')
self.test_ini_file(self.inv_dir)
def test_merge_from_ini_file(self, overwrite=False, overwrite_vars=False):
new_inv_vars = json.dumps({'varc': 'C'})
new_inv = self.organizations[0].inventories.create(name='inv123',
variables=new_inv_vars)
lb_host_vars = json.dumps({'lbvar': 'ni!'})
lb_host = new_inv.hosts.create(name='lb.example.com',
variables=lb_host_vars)
lb_group = new_inv.groups.create(name='lbservers')
servers_group_vars = json.dumps({'vard': 'D'})
servers_group = new_inv.groups.create(name='servers',
variables=servers_group_vars)
servers_group.children.add(lb_group)
lb_group.hosts.add(lb_host)
result, stdout, stderr = self.run_command('inventory_import',
inventory_id=new_inv.pk,
source=self.ini_path,
overwrite=overwrite,
overwrite_vars=overwrite_vars)
self.assertEqual(result, None, stdout + stderr)
# Check that inventory is populated as expected.
new_inv = Inventory.objects.get(pk=new_inv.pk)
expected_group_names = set(['servers', 'dbservers', 'webservers',
'lbservers', 'others'])
if overwrite:
expected_group_names.remove('lbservers')
group_names = set(new_inv.groups.values_list('name', flat=True))
self.assertEqual(expected_group_names, group_names)
expected_host_names = set(['web1.example.com', 'web2.example.com',
'web3.example.com', 'db1.example.com',
'db2.example.com', 'lb.example.com',
'10.11.12.13', '10.12.14.16',
'fe80::1610:9fff:fedd:654b',
'fe80::1610:9fff:fedd:b654', '::1'])
if overwrite:
expected_host_names.remove('lb.example.com')
host_names = set(new_inv.hosts.values_list('name', flat=True))
self.assertEqual(expected_host_names, host_names)
expected_inv_vars = {'vara': 'A', 'varc': 'C'}
if overwrite_vars:
expected_inv_vars.pop('varc')
self.assertEqual(new_inv.variables_dict, expected_inv_vars)
for host in new_inv.hosts.all():
if host.name == 'web1.example.com':
self.assertEqual(host.variables_dict,
{'ansible_ssh_host': 'w1.example.net'})
elif host.name in ('web3.example.com', 'fe80::1610:9fff:fedd:b654'):
self.assertEqual(host.variables_dict, {'ansible_ssh_port': 1022})
elif host.name == '10.12.14.16':
self.assertEqual(host.variables_dict, {'ansible_ssh_port': 8022})
elif host.name == 'lb.example.com':
self.assertEqual(host.variables_dict, {'lbvar': 'ni!'})
else:
self.assertEqual(host.variables_dict, {})
for group in new_inv.groups.all():
if group.name == 'servers':
expected_vars = {'varb': 'B', 'vard': 'D'}
if overwrite_vars:
expected_vars.pop('vard')
self.assertEqual(group.variables_dict, expected_vars)
children = set(group.children.values_list('name', flat=True))
expected_children = set(['dbservers', 'webservers', 'lbservers'])
if overwrite:
expected_children.remove('lbservers')
self.assertEqual(children, expected_children)
self.assertEqual(group.hosts.count(), 0)
elif group.name == 'dbservers':
self.assertEqual(group.variables_dict, {'dbvar': 'ugh'})
self.assertEqual(group.children.count(), 0)
hosts = set(group.hosts.values_list('name', flat=True))
host_names = set(['db1.example.com','db2.example.com'])
self.assertEqual(hosts, host_names)
elif group.name == 'webservers':
self.assertEqual(group.variables_dict, {'webvar': 'blah'})
self.assertEqual(group.children.count(), 0)
hosts = set(group.hosts.values_list('name', flat=True))
host_names = set(['web1.example.com','web2.example.com',
'web3.example.com'])
self.assertEqual(hosts, host_names)
elif group.name == 'lbservers':
self.assertEqual(group.variables_dict, {})
self.assertEqual(group.children.count(), 0)
hosts = set(group.hosts.values_list('name', flat=True))
host_names = set(['lb.example.com'])
self.assertEqual(hosts, host_names)
if overwrite:
except_host_pks = set()
except_group_pks = set()
else:
except_host_pks = set([lb_host.pk])
except_group_pks = set([lb_group.pk])
self.check_adhoc_inventory_source(new_inv, except_host_pks,
except_group_pks)
def test_overwrite_vars_from_ini_file(self):
self.test_merge_from_ini_file(overwrite_vars=True)
def test_overwrite_from_ini_file(self):
self.test_merge_from_ini_file(overwrite=True)
def test_ini_file_with_host_patterns(self):
self.create_test_ini(ini_content=TEST_INVENTORY_INI_WITH_HOST_PATTERNS)
# New empty inventory.
new_inv = self.organizations[0].inventories.create(name='newb')
self.assertEqual(new_inv.hosts.count(), 0)
self.assertEqual(new_inv.groups.count(), 0)
result, stdout, stderr = self.run_command('inventory_import',
inventory_id=new_inv.pk,
source=self.ini_path)
self.assertEqual(result, None, stdout + stderr)
# Check that inventory is populated as expected.
new_inv = Inventory.objects.get(pk=new_inv.pk)
expected_group_names = set(['dotcom', 'dotnet', 'dotorg', 'dotus', 'dotcc'])
group_names = set(new_inv.groups.values_list('name', flat=True))
self.assertEqual(expected_group_names, group_names)
# Check that all host ranges are expanded into host names.
expected_host_names = set()
expected_host_names.update(['web%02d.example.com' % x for x in xrange(64)])
expected_host_names.add('dns.example.com')
expected_host_names.update(['db-%s.example.net' % x for x in string.ascii_lowercase])
expected_host_names.add('ns.example.net')
for x in 'ABCDEF':
for y in xrange(10):
expected_host_names.add('%s%d.example.org' % (x, y))
expected_host_names.add('mx.example.org')
expected_host_names.update(['lb%02d.example.us' % x for x in xrange(10)])
expected_host_names.update(['media%02d.example.cc' % x for x in xrange(100)])
host_names = set(new_inv.hosts.values_list('name', flat=True))
self.assertEqual(expected_host_names, host_names)
# Check hosts in dotcom group.
group = new_inv.groups.get(name='dotcom')
self.assertEqual(group.hosts.count(), 65)
for host in group.hosts.filter( name__startswith='web'):
self.assertEqual(host.variables_dict.get('ansible_ssh_user', ''), 'example')
# Check hosts in dotnet group.
group = new_inv.groups.get(name='dotnet')
self.assertEqual(group.hosts.count(), 27)
# Check hosts in dotorg group.
group = new_inv.groups.get(name='dotorg')
self.assertEqual(group.hosts.count(), 61)
for host in group.hosts.all():
if host.name.startswith('mx.'):
continue
self.assertEqual(host.variables_dict.get('ansible_ssh_user', ''), 'example')
self.assertEqual(host.variables_dict.get('ansible_ssh_port', 22), 1022)
# Check hosts in dotus group.
group = new_inv.groups.get(name='dotus')
self.assertEqual(group.hosts.count(), 10)
for host in group.hosts.all():
if int(host.name[2:4]) % 2 == 0:
self.assertEqual(host.variables_dict.get('even_odd', ''), 'even')
else:
self.assertEqual(host.variables_dict.get('even_odd', ''), 'odd')
# Check hosts in dotcc group.
group = new_inv.groups.get(name='dotcc')
self.assertEqual(group.hosts.count(), 100)
# Check inventory source/update after running command.
self.check_adhoc_inventory_source(new_inv)
# Test with invalid host pattern -- alpha begin > end.
self.create_test_ini(ini_content='[invalid]\nhost[X:P]')
result, stdout, stderr = self.run_command('inventory_import',
inventory_id=new_inv.pk,
source=self.ini_path)
self.assertTrue(isinstance(result, ValueError), result)
# Test with invalid host pattern -- different numeric pattern lengths.
self.create_test_ini(ini_content='[invalid]\nhost[001:08]')
result, stdout, stderr = self.run_command('inventory_import',
inventory_id=new_inv.pk,
source=self.ini_path)
self.assertTrue(isinstance(result, ValueError), result)
# Test with invalid host pattern -- invalid range/slice spec.
self.create_test_ini(ini_content='[invalid]\nhost[1:2:3:4]')
result, stdout, stderr = self.run_command('inventory_import',
inventory_id=new_inv.pk,
source=self.ini_path)
self.assertTrue(isinstance(result, ValueError), result)
# Test with invalid host pattern -- no begin.
self.create_test_ini(ini_content='[invalid]\nhost[:9]')
result, stdout, stderr = self.run_command('inventory_import',
inventory_id=new_inv.pk,
source=self.ini_path)
self.assertTrue(isinstance(result, ValueError), result)
# Test with invalid host pattern -- no end.
self.create_test_ini(ini_content='[invalid]\nhost[0:]')
result, stdout, stderr = self.run_command('inventory_import',
inventory_id=new_inv.pk,
source=self.ini_path)
self.assertTrue(isinstance(result, ValueError), result)
# Test with invalid host pattern -- invalid slice.
self.create_test_ini(ini_content='[invalid]\nhost[0:9:Q]')
result, stdout, stderr = self.run_command('inventory_import',
inventory_id=new_inv.pk,
source=self.ini_path)
self.assertTrue(isinstance(result, ValueError), result)
def test_ini_file_with_recursive_groups(self):
self.create_test_ini(ini_content=TEST_INVENTORY_INI_WITH_RECURSIVE_GROUPS)
new_inv = self.organizations[0].inventories.create(name='new')
self.assertEqual(new_inv.hosts.count(), 0)
self.assertEqual(new_inv.groups.count(), 0)
result, stdout, stderr = self.run_command('inventory_import',
inventory_id=new_inv.pk,
source=self.ini_path)
self.assertEqual(result, None, stdout + stderr)
def test_executable_file(self):
# Use existing inventory as source.
old_inv = self.inventories[1]
# Modify host name to contain brackets (AC-1295).
old_host = old_inv.hosts.all()[0]
old_host.name = '[hey look some brackets]'
old_host.save()
# New empty inventory.
new_inv = self.organizations[0].inventories.create(name='newb')
self.assertEqual(new_inv.hosts.count(), 0)
self.assertEqual(new_inv.groups.count(), 0)
# Use our own inventory script as executable file.
rest_api_url = self.live_server_url
parts = urlparse.urlsplit(rest_api_url)
username, password = self.get_super_credentials()
netloc = '%s:%s@%s' % (username, password, parts.netloc)
rest_api_url = urlparse.urlunsplit([parts.scheme, netloc, parts.path,
parts.query, parts.fragment])
os.environ.setdefault('REST_API_URL', rest_api_url)
os.environ['INVENTORY_ID'] = str(old_inv.pk)
source = os.path.join(os.path.dirname(__file__), '..', '..', '..', '..', 'plugins',
'inventory', 'awxrest.py')
result, stdout, stderr = self.run_command('inventory_import',
inventory_id=new_inv.pk,
source=source)
self.assertEqual(result, None, stdout + stderr)
# Check that inventory is populated as expected.
new_inv = Inventory.objects.get(pk=new_inv.pk)
self.assertEqual(old_inv.variables_dict, new_inv.variables_dict)
old_groups = set(old_inv.groups.values_list('name', flat=True))
new_groups = set(new_inv.groups.values_list('name', flat=True))
self.assertEqual(old_groups, new_groups)
old_hosts = set(old_inv.hosts.values_list('name', flat=True))
new_hosts = set(new_inv.hosts.values_list('name', flat=True))
self.assertEqual(old_hosts, new_hosts)
for new_host in new_inv.hosts.all():
old_host = old_inv.hosts.get(name=new_host.name)
self.assertEqual(old_host.variables_dict, new_host.variables_dict)
for new_group in new_inv.groups.all():
old_group = old_inv.groups.get(name=new_group.name)
self.assertEqual(old_group.variables_dict, new_group.variables_dict)
old_children = set(old_group.children.values_list('name', flat=True))
new_children = set(new_group.children.values_list('name', flat=True))
self.assertEqual(old_children, new_children)
old_hosts = set(old_group.hosts.values_list('name', flat=True))
new_hosts = set(new_group.hosts.values_list('name', flat=True))
self.assertEqual(old_hosts, new_hosts)
self.check_adhoc_inventory_source(new_inv)
def test_executable_file_with_meta_hostvars(self):
os.environ['INVENTORY_HOSTVARS'] = '1'
self.test_executable_file()
def test_large_executable_file(self):
new_inv = self.organizations[0].inventories.create(name='newec2')
self.assertEqual(new_inv.hosts.count(), 0)
self.assertEqual(new_inv.groups.count(), 0)
os.chdir(os.path.join(os.path.dirname(__file__), '..', '..', 'data'))
inv_file = 'large_ec2_inventory.py'
result, stdout, stderr = self.run_command('inventory_import',
inventory_id=new_inv.pk,
source=inv_file)
self.assertEqual(result, None, stdout + stderr)
# Check that inventory is populated as expected within a reasonable
# amount of time. Computed fields should also be updated.
new_inv = Inventory.objects.get(pk=new_inv.pk)
self.assertNotEqual(new_inv.hosts.count(), 0)
self.assertNotEqual(new_inv.groups.count(), 0)
self.assertNotEqual(new_inv.total_hosts, 0)
self.assertNotEqual(new_inv.total_groups, 0)
self.assertElapsedLessThan(60)
def _get_ngroups_for_nhosts(self, n):
if n > 0:
return min(n, 10) + ((n - 1) / 10 + 1) + ((n - 1) / 100 + 1) + ((n - 1) / 1000 + 1)
else:
return 0
def _check_largeinv_import(self, new_inv, nhosts):
self._start_time = time.time()
inv_file = os.path.join(os.path.dirname(__file__), '..', '..', 'data', 'largeinv.py')
ngroups = self._get_ngroups_for_nhosts(nhosts)
os.environ['NHOSTS'] = str(nhosts)
result, stdout, stderr = self.run_command('inventory_import',
inventory_id=new_inv.pk,
source=inv_file,
overwrite=True, verbosity=0)
self.assertEqual(result, None, stdout + stderr)
# Check that inventory is populated as expected within a reasonable
# amount of time. Computed fields should also be updated.
new_inv = Inventory.objects.get(pk=new_inv.pk)
self.assertEqual(new_inv.hosts.count(), nhosts)
self.assertEqual(new_inv.groups.count(), ngroups)
self.assertEqual(new_inv.total_hosts, nhosts)
self.assertEqual(new_inv.total_groups, ngroups)
self.assertElapsedLessThan(120)
@unittest.skipIf(getattr(settings, 'LOCAL_DEVELOPMENT', False),
'Skip this test in local development environments, '
'which may vary widely on memory.')
def test_large_inventory_file(self):
new_inv = self.organizations[0].inventories.create(name='largeinv')
self.assertEqual(new_inv.hosts.count(), 0)
self.assertEqual(new_inv.groups.count(), 0)
nhosts = 2000
# Test initial import into empty inventory.
self._check_largeinv_import(new_inv, nhosts)
# Test re-importing and overwriting.
self._check_largeinv_import(new_inv, nhosts)
# Test re-importing with only half as many hosts.
self._check_largeinv_import(new_inv, nhosts / 2)
# Test re-importing that clears all hosts.
self._check_largeinv_import(new_inv, 0)

View File

@ -13,6 +13,7 @@ from awx.main.management.commands.inventory_import import (
)
@pytest.mark.inventory_import
class TestInvalidOptions:
def test_invalid_options_no_options_specified(self):